Files
SanatiLeads/app/api/analyze/route.ts

209 lines
6.1 KiB
TypeScript

import { NextRequest, NextResponse } from 'next/server'
import { convexAuthNextjsToken, isAuthenticatedNextjs } from "@convex-dev/auth/nextjs/server";
import { fetchMutation } from "convex/nextjs";
import { api } from "@/convex/_generated/api";
import { z } from 'zod'
import { scrapeWebsite, ScrapingError } from '@/lib/scraper'
import { performDeepAnalysis } from '@/lib/analysis-pipeline'
// Request payload: a non-empty target URL to analyze, plus an optional
// Convex analysis-job id used for streaming progress updates.
const bodySchema = z.object({
  url: z.string().min(1),
  jobId: z.string().optional(),
})
export async function POST(request: NextRequest) {
let jobId: string | undefined
let timeline: {
key: string
label: string
status: "pending" | "running" | "completed" | "failed"
detail?: string
}[] = []
try {
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
const nextPath = referer ? new URL(referer).pathname + new URL(referer).search : "/";
redirectUrl.searchParams.set("next", nextPath);
return NextResponse.redirect(redirectUrl);
}
const body = await request.json()
const parsed = bodySchema.parse(body)
const { url } = parsed
jobId = parsed.jobId
const token = await convexAuthNextjsToken();
timeline = [
{ key: "scrape", label: "Scrape website", status: "pending" },
{ key: "features", label: "Pass 1: Features", status: "pending" },
{ key: "competitors", label: "Pass 2: Competitors", status: "pending" },
{ key: "keywords", label: "Pass 3: Keywords", status: "pending" },
{ key: "problems", label: "Pass 4: Problems & Personas", status: "pending" },
{ key: "useCases", label: "Pass 5: Use cases", status: "pending" },
{ key: "dorkQueries", label: "Pass 6: Dork queries", status: "pending" },
{ key: "finalize", label: "Finalize analysis", status: "pending" },
]
const updateTimeline = async ({
key,
status,
detail,
progress,
finalStatus,
}: {
key: string
status: "pending" | "running" | "completed" | "failed"
detail?: string
progress?: number
finalStatus?: "running" | "completed" | "failed"
}) => {
if (!jobId) return
timeline = timeline.map((item) =>
item.key === key ? { ...item, status, detail: detail ?? item.detail } : item
)
await fetchMutation(
api.analysisJobs.update,
{
jobId: jobId as any,
status: finalStatus || "running",
progress,
stage: key,
timeline,
},
{ token }
)
}
if (jobId) {
await updateTimeline({ key: "scrape", status: "running", progress: 10 })
}
if (!process.env.OPENAI_API_KEY) {
if (jobId) {
await fetchMutation(
api.analysisJobs.update,
{
jobId: jobId as any,
status: "failed",
error: "OpenAI API key not configured",
timeline: timeline.map((item) =>
item.status === "running" ? { ...item, status: "failed" } : item
),
},
{ token }
);
}
return NextResponse.json(
{ error: 'OpenAI API key not configured' },
{ status: 500 }
)
}
console.log(`🌐 Scraping: ${url}`)
const scrapedContent = await scrapeWebsite(url)
console.log(` ✓ Scraped ${scrapedContent.headings.length} headings, ${scrapedContent.paragraphs.length} paragraphs`)
if (jobId) {
await updateTimeline({
key: "scrape",
status: "completed",
detail: `${scrapedContent.headings.length} headings, ${scrapedContent.paragraphs.length} paragraphs`,
progress: 20,
})
}
console.log('🤖 Starting enhanced analysis...')
const progressMap: Record<string, number> = {
features: 35,
competitors: 50,
keywords: 65,
problems: 78,
useCases: 88,
dorkQueries: 95,
}
const analysis = await performDeepAnalysis(scrapedContent, async (update) => {
await updateTimeline({
key: update.key,
status: update.status,
detail: update.detail,
progress: progressMap[update.key] ?? 80,
})
})
console.log(` ✓ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords, ${analysis.dorkQueries.length} queries`)
if (jobId) {
await updateTimeline({
key: "finalize",
status: "running",
progress: 98,
})
}
if (jobId) {
await updateTimeline({
key: "finalize",
status: "completed",
progress: 100,
finalStatus: "completed",
})
}
return NextResponse.json({
success: true,
data: analysis,
stats: {
features: analysis.features.length,
keywords: analysis.keywords.length,
personas: analysis.personas.length,
useCases: analysis.useCases.length,
competitors: analysis.competitors.length,
dorkQueries: analysis.dorkQueries.length
}
})
} catch (error: any) {
console.error('❌ Analysis error:', error)
if (jobId) {
try {
const token = await convexAuthNextjsToken();
await fetchMutation(
api.analysisJobs.update,
{
jobId: jobId as any,
status: "failed",
error: error.message || "Analysis failed",
timeline: timeline.map((item) =>
item.status === "running" ? { ...item, status: "failed" } : item
),
},
{ token }
);
} catch {
// Best-effort job update only.
}
}
if (error instanceof ScrapingError) {
return NextResponse.json(
{
error: error.message,
code: error.code,
needsManualInput: true
},
{ status: 400 }
)
}
if (error.name === 'ZodError') {
return NextResponse.json(
{ error: 'Invalid URL provided' },
{ status: 400 }
)
}
return NextResponse.json(
{ error: error.message || 'Failed to analyze website' },
{ status: 500 }
)
}
}