This commit is contained in:
2026-02-04 12:51:41 +00:00
parent 4fdbfb0fb3
commit f1e13f87f6
19 changed files with 722 additions and 67 deletions

View File

@@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api";
import { z } from 'zod'
import { analyzeFromText } from '@/lib/scraper'
import { performDeepAnalysis } from '@/lib/analysis-pipeline'
import { logServer } from "@/lib/server-logger";
const bodySchema = z.object({
productName: z.string().min(1),
@@ -22,6 +23,7 @@ export async function POST(request: NextRequest) {
detail?: string
}[] = []
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -100,7 +102,14 @@ export async function POST(request: NextRequest) {
)
}
console.log('📝 Creating content from manual input...')
await logServer({
level: "info",
message: "Preparing manual input for analysis",
labels: ["api", "analyze-manual", "scrape"],
payload: { productName },
requestId,
source: "api/analyze-manual",
});
const scrapedContent = await analyzeFromText(productName, description, features)
if (jobId) {
await updateTimeline({
@@ -111,7 +120,13 @@ export async function POST(request: NextRequest) {
})
}
console.log('🤖 Starting enhanced analysis...')
await logServer({
level: "info",
message: "Starting enhanced analysis",
labels: ["api", "analyze-manual", "analysis"],
requestId,
source: "api/analyze-manual",
});
const progressMap: Record<string, number> = {
features: 35,
competitors: 50,
@@ -128,7 +143,17 @@ export async function POST(request: NextRequest) {
progress: progressMap[update.key] ?? 80,
})
})
console.log(` ✓ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords`)
await logServer({
level: "info",
message: "Analysis complete",
labels: ["api", "analyze-manual", "analysis"],
payload: {
features: analysis.features.length,
keywords: analysis.keywords.length,
},
requestId,
source: "api/analyze-manual",
});
if (jobId) {
await updateTimeline({
key: "finalize",
@@ -186,7 +211,14 @@ export async function POST(request: NextRequest) {
persisted = true
}
} catch (persistError) {
console.error("Failed to persist manual analysis:", persistError)
await logServer({
level: "error",
message: "Failed to persist manual analysis",
labels: ["api", "analyze-manual", "persist", "error"],
payload: { error: String(persistError) },
requestId,
source: "api/analyze-manual",
});
}
}
@@ -205,7 +237,17 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Manual analysis error:', error)
await logServer({
level: "error",
message: "Manual analysis error",
labels: ["api", "analyze-manual", "error"],
payload: {
message: error?.message,
stack: error?.stack,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/analyze-manual",
});
if (jobId) {
try {

View File

@@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api";
import { z } from 'zod'
import { scrapeWebsite, ScrapingError } from '@/lib/scraper'
import { performDeepAnalysis } from '@/lib/analysis-pipeline'
import { logServer } from "@/lib/server-logger";
const bodySchema = z.object({
url: z.string().min(1),
@@ -20,6 +21,7 @@ export async function POST(request: NextRequest) {
detail?: string
}[] = []
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -99,9 +101,26 @@ export async function POST(request: NextRequest) {
)
}
console.log(`🌐 Scraping: ${url}`)
await logServer({
level: "info",
message: "Scraping website",
labels: ["api", "analyze", "scrape"],
payload: { url },
requestId,
source: "api/analyze",
});
const scrapedContent = await scrapeWebsite(url)
console.log(` ✓ Scraped ${scrapedContent.headings.length} headings, ${scrapedContent.paragraphs.length} paragraphs`)
await logServer({
level: "info",
message: "Scrape complete",
labels: ["api", "analyze", "scrape"],
payload: {
headings: scrapedContent.headings.length,
paragraphs: scrapedContent.paragraphs.length,
},
requestId,
source: "api/analyze",
});
if (jobId) {
await updateTimeline({
key: "scrape",
@@ -111,7 +130,13 @@ export async function POST(request: NextRequest) {
})
}
console.log('🤖 Starting enhanced analysis...')
await logServer({
level: "info",
message: "Starting enhanced analysis",
labels: ["api", "analyze", "analysis"],
requestId,
source: "api/analyze",
});
const progressMap: Record<string, number> = {
features: 35,
competitors: 50,
@@ -128,7 +153,18 @@ export async function POST(request: NextRequest) {
progress: progressMap[update.key] ?? 80,
})
})
console.log(` ✓ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords, ${analysis.dorkQueries.length} queries`)
await logServer({
level: "info",
message: "Analysis complete",
labels: ["api", "analyze", "analysis"],
payload: {
features: analysis.features.length,
keywords: analysis.keywords.length,
dorkQueries: analysis.dorkQueries.length,
},
requestId,
source: "api/analyze",
});
if (jobId) {
await updateTimeline({
key: "finalize",
@@ -186,7 +222,14 @@ export async function POST(request: NextRequest) {
persisted = true
}
} catch (persistError) {
console.error("Failed to persist analysis:", persistError)
await logServer({
level: "error",
message: "Failed to persist analysis",
labels: ["api", "analyze", "persist", "error"],
payload: { error: String(persistError) },
requestId,
source: "api/analyze",
});
}
}
@@ -205,7 +248,17 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Analysis error:', error)
await logServer({
level: "error",
message: "Analysis error",
labels: ["api", "analyze", "error"],
payload: {
message: error?.message,
stack: error?.stack,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/analyze",
});
if (jobId) {
try {

View File

@@ -6,6 +6,7 @@ import { z } from 'zod'
import { generateSearchQueries, getDefaultPlatforms } from '@/lib/query-generator'
import { executeSearches, scoreOpportunities } from '@/lib/search-executor'
import type { EnhancedProductAnalysis, SearchConfig, PlatformConfig } from '@/lib/types'
import { logServer } from "@/lib/server-logger";
const searchSchema = z.object({
projectId: z.string(),
@@ -23,12 +24,15 @@ const searchSchema = z.object({
})),
strategies: z.array(z.string()),
maxResults: z.number().default(50)
minAgeDays: z.number().min(0).max(365).optional(),
maxAgeDays: z.number().min(0).max(365).optional()
})
})
export async function POST(request: NextRequest) {
let jobId: string | undefined
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -41,9 +45,21 @@ export async function POST(request: NextRequest) {
const parsed = searchSchema.parse(body)
const { projectId, config } = parsed
jobId = parsed.jobId
const ageFilters = {
minAgeDays: config.minAgeDays,
maxAgeDays: config.maxAgeDays,
}
if (!process.env.SERPER_API_KEY) {
const errorMessage = "SERPER_API_KEY is not configured. Add it to your environment to run searches."
await logServer({
level: "warn",
message: "Serper API key missing",
labels: ["api", "opportunities", "config", "warn"],
payload: { projectId },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -84,19 +100,43 @@ export async function POST(request: NextRequest) {
const analysis = searchContext.context as EnhancedProductAnalysis
console.log('🔍 Starting opportunity search...')
console.log(` Product: ${analysis.productName}`)
console.log(` Platforms: ${config.platforms.filter(p => p.enabled).map(p => p.name).join(', ')}`)
console.log(` Strategies: ${config.strategies.join(', ')}`)
await logServer({
level: "info",
message: "Starting opportunity search",
labels: ["api", "opportunities", "start"],
payload: {
projectId,
productName: analysis.productName,
platforms: config.platforms.filter((p) => p.enabled).map((p) => p.name),
strategies: config.strategies,
filters: ageFilters,
},
requestId,
source: "api/opportunities",
});
// Generate queries
console.log(' Generating search queries...')
await logServer({
level: "info",
message: "Generating search queries",
labels: ["api", "opportunities", "queries"],
payload: { projectId },
requestId,
source: "api/opportunities",
});
const enforcedConfig: SearchConfig = {
...(config as SearchConfig),
maxResults: Math.min((config as SearchConfig).maxResults || 50, 50),
}
const queries = generateSearchQueries(analysis as EnhancedProductAnalysis, enforcedConfig)
console.log(` ✓ Generated ${queries.length} queries`)
await logServer({
level: "info",
message: "Generated search queries",
labels: ["api", "opportunities", "queries"],
payload: { projectId, count: queries.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -106,9 +146,23 @@ export async function POST(request: NextRequest) {
}
// Execute searches
console.log(' Executing searches...')
const searchResults = await executeSearches(queries)
console.log(` ✓ Found ${searchResults.length} raw results`)
await logServer({
level: "info",
message: "Executing searches",
labels: ["api", "opportunities", "search"],
payload: { projectId, queryCount: queries.length },
requestId,
source: "api/opportunities",
});
const searchResults = await executeSearches(queries, ageFilters)
await logServer({
level: "info",
message: "Searches complete",
labels: ["api", "opportunities", "search"],
payload: { projectId, rawResults: searchResults.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -130,9 +184,23 @@ export async function POST(request: NextRequest) {
const filteredResults = searchResults.filter((result) => !existingSet.has(result.url))
// Score and rank
console.log(' Scoring opportunities...')
await logServer({
level: "info",
message: "Scoring opportunities",
labels: ["api", "opportunities", "score"],
payload: { projectId, candidateResults: filteredResults.length },
requestId,
source: "api/opportunities",
});
const opportunities = scoreOpportunities(filteredResults, analysis as EnhancedProductAnalysis)
console.log(` ✓ Scored ${opportunities.length} opportunities`)
await logServer({
level: "info",
message: "Opportunities scored",
labels: ["api", "opportunities", "score"],
payload: { projectId, scored: opportunities.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -179,7 +247,14 @@ export async function POST(request: NextRequest) {
} catch (error: any) {
const errorMessage =
error instanceof Error ? error.message : typeof error === "string" ? error : "Search failed"
console.error("❌ Opportunity search error:", errorMessage)
await logServer({
level: "error",
message: "Opportunity search error",
labels: ["api", "opportunities", "error"],
payload: { message: errorMessage },
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/opportunities",
});
if (jobId) {
try {

View File

@@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server'
import { isAuthenticatedNextjs } from "@convex-dev/auth/nextjs/server";
import { z } from 'zod'
import type { EnhancedProductAnalysis, Opportunity, DorkQuery } from '@/lib/types'
import { logServer } from "@/lib/server-logger";
import { appendSerperAgeModifiers, SerperAgeFilter } from "@/lib/serper-date-filters";
// Search result from any source
interface SearchResult {
@@ -31,11 +33,14 @@ const bodySchema = z.object({
problem: z.string(),
searchTerms: z.array(z.string())
}))
})
}),
minAgeDays: z.number().min(0).max(365).optional(),
maxAgeDays: z.number().min(0).max(365).optional(),
})
export async function POST(request: NextRequest) {
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -45,16 +50,34 @@ export async function POST(request: NextRequest) {
}
const body = await request.json()
const { analysis } = bodySchema.parse(body)
const { analysis, minAgeDays, maxAgeDays } = bodySchema.parse(body)
const ageFilters: SerperAgeFilter = {
minAgeDays,
maxAgeDays,
}
if (!process.env.SERPER_API_KEY) {
await logServer({
level: "warn",
message: "Serper API key missing",
labels: ["api", "search", "config", "warn"],
requestId,
source: "api/search",
});
return NextResponse.json(
{ error: 'SERPER_API_KEY is not configured. Add it to your environment to run searches.' },
{ status: 400 }
)
}
console.log(`🔍 Finding opportunities for: ${analysis.productName}`)
await logServer({
level: "info",
message: "Finding opportunities",
labels: ["api", "search", "start"],
payload: { productName: analysis.productName, filters: ageFilters },
requestId,
source: "api/search",
});
// Sort queries by priority
const sortedQueries = analysis.dorkQueries
@@ -69,22 +92,50 @@ export async function POST(request: NextRequest) {
// Execute searches
for (const query of sortedQueries) {
try {
console.log(` Searching: ${query.query.substring(0, 60)}...`)
const results = await searchGoogle(query.query, 5)
await logServer({
level: "info",
message: "Searching query",
labels: ["api", "search", "query"],
payload: { query: query.query, platform: query.platform },
requestId,
source: "api/search",
});
const results = await searchGoogle(query.query, 5, ageFilters, requestId)
allResults.push(...results)
// Small delay to avoid rate limiting
await new Promise(r => setTimeout(r, 500))
} catch (e) {
console.error(` Search failed for query: ${query.query.substring(0, 40)}`)
await logServer({
level: "error",
message: "Search failed for query",
labels: ["api", "search", "query", "error"],
payload: { query: query.query, error: String(e) },
requestId,
source: "api/search",
});
}
}
console.log(` Found ${allResults.length} raw results`)
await logServer({
level: "info",
message: "Search complete",
labels: ["api", "search", "results"],
payload: { rawResults: allResults.length },
requestId,
source: "api/search",
});
// Analyze and score opportunities
const opportunities = await analyzeOpportunities(allResults, analysis as EnhancedProductAnalysis)
console.log(` ✓ Analyzed ${opportunities.length} opportunities`)
await logServer({
level: "info",
message: "Opportunities analyzed",
labels: ["api", "search", "analyze"],
payload: { analyzed: opportunities.length },
requestId,
source: "api/search",
});
return NextResponse.json({
success: true,
@@ -100,7 +151,18 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Search error:', error)
await logServer({
level: "error",
message: "Search error",
labels: ["api", "search", "error"],
payload: {
message: error?.message,
stack: error?.stack,
filters: ageFilters,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/search",
});
return NextResponse.json(
{ error: error.message || 'Failed to find opportunities' },
@@ -109,23 +171,42 @@ export async function POST(request: NextRequest) {
}
}
async function searchGoogle(query: string, num: number): Promise<SearchResult[]> {
return searchSerper(query, num)
// Thin facade over the Serper backend: keeps call sites provider-agnostic.
// Age filters and the request id are forwarded untouched for logging/filtering.
async function searchGoogle(
  query: string,
  num: number,
  filters?: SerperAgeFilter,
  requestId?: string
): Promise<SearchResult[]> {
  const results = await searchSerper(query, num, filters, requestId);
  return results;
}
async function searchSerper(query: string, num: number): Promise<SearchResult[]> {
async function searchSerper(
query: string,
num: number,
filters?: SerperAgeFilter,
requestId?: string
): Promise<SearchResult[]> {
const filteredQuery = appendSerperAgeModifiers(query, filters)
const response = await fetch('https://google.serper.dev/search', {
method: 'POST',
headers: {
'X-API-KEY': process.env.SERPER_API_KEY!,
'Content-Type': 'application/json'
},
body: JSON.stringify({ q: query, num })
body: JSON.stringify({ q: filteredQuery, num })
})
if (!response.ok) throw new Error('Serper API error')
const data = await response.json()
await logServer({
level: "info",
message: "Serper response received",
labels: ["api", "search", "serper", "response"],
payload: { query: filteredQuery, num, filters, data },
requestId,
source: "api/search",
});
return (data.organic || []).map((r: any) => ({
title: r.title,
url: r.link,