This commit is contained in:
2026-02-04 12:51:41 +00:00
parent 4fdbfb0fb3
commit f1e13f87f6
19 changed files with 722 additions and 67 deletions

12
.dockerignore Normal file
View File

@@ -0,0 +1,12 @@
.git
.next
node_modules
npm-debug.log
yarn-error.log
.env
.env.*
.DS_Store
*.log
*.local
docs
scripts

49
Dockerfile Normal file
View File

@@ -0,0 +1,49 @@
# Multi-stage image: deps -> builder -> runner, serving the Next.js
# standalone output with the distro Chromium available for Puppeteer.
FROM node:20-bookworm-slim AS base
ENV NEXT_TELEMETRY_DISABLED=1

# ---- Install dependencies -------------------------------------------------
FROM base AS deps
WORKDIR /app
COPY package.json package-lock.json ./
# --include=dev forces devDependencies to be installed even when
# NODE_ENV=production is present in the environment. Without it, setting
# NODE_ENV=production in a shared stage makes `npm ci` omit dev deps and
# the `npm run build` step below fails (Next.js builds need them).
RUN npm ci --include=dev

# ---- Build ----------------------------------------------------------------
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .
RUN npm run build

# ---- Runtime --------------------------------------------------------------
FROM base AS runner
WORKDIR /app
# Production mode only at runtime; see the npm ci note above.
ENV NODE_ENV=production
ENV PORT=3000
# Use the Debian-packaged Chromium instead of letting Puppeteer download one.
ENV PUPPETEER_SKIP_DOWNLOAD=1
ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium
RUN apt-get update && apt-get install -y --no-install-recommends \
    chromium \
    fonts-liberation \
    libasound2 \
    libatk1.0-0 \
    libcups2 \
    libdrm2 \
    libxkbcommon0 \
    libxcomposite1 \
    libxdamage1 \
    libxrandr2 \
    libgbm1 \
    libpango-1.0-0 \
    libpangocairo-1.0-0 \
    libgtk-3-0 \
    libnss3 \
    libx11-xcb1 \
    libxss1 \
    libxtst6 \
    libu2f-udev \
    && rm -rf /var/lib/apt/lists/*
# NOTE(review): copying `.next/standalone` assumes `output: 'standalone'`
# is set in next.config — confirm, otherwise this COPY fails at build time.
COPY --from=builder /app/public ./public
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
EXPOSE 3000
CMD ["node", "server.js"]

View File

@@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api";
import { z } from 'zod'
import { analyzeFromText } from '@/lib/scraper'
import { performDeepAnalysis } from '@/lib/analysis-pipeline'
import { logServer } from "@/lib/server-logger";
const bodySchema = z.object({
productName: z.string().min(1),
@@ -22,6 +23,7 @@ export async function POST(request: NextRequest) {
detail?: string
}[] = []
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -100,7 +102,14 @@ export async function POST(request: NextRequest) {
)
}
console.log('📝 Creating content from manual input...')
await logServer({
level: "info",
message: "Preparing manual input for analysis",
labels: ["api", "analyze-manual", "scrape"],
payload: { productName },
requestId,
source: "api/analyze-manual",
});
const scrapedContent = await analyzeFromText(productName, description, features)
if (jobId) {
await updateTimeline({
@@ -111,7 +120,13 @@ export async function POST(request: NextRequest) {
})
}
console.log('🤖 Starting enhanced analysis...')
await logServer({
level: "info",
message: "Starting enhanced analysis",
labels: ["api", "analyze-manual", "analysis"],
requestId,
source: "api/analyze-manual",
});
const progressMap: Record<string, number> = {
features: 35,
competitors: 50,
@@ -128,7 +143,17 @@ export async function POST(request: NextRequest) {
progress: progressMap[update.key] ?? 80,
})
})
console.log(` ✓ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords`)
await logServer({
level: "info",
message: "Analysis complete",
labels: ["api", "analyze-manual", "analysis"],
payload: {
features: analysis.features.length,
keywords: analysis.keywords.length,
},
requestId,
source: "api/analyze-manual",
});
if (jobId) {
await updateTimeline({
key: "finalize",
@@ -186,7 +211,14 @@ export async function POST(request: NextRequest) {
persisted = true
}
} catch (persistError) {
console.error("Failed to persist manual analysis:", persistError)
await logServer({
level: "error",
message: "Failed to persist manual analysis",
labels: ["api", "analyze-manual", "persist", "error"],
payload: { error: String(persistError) },
requestId,
source: "api/analyze-manual",
});
}
}
@@ -205,7 +237,17 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Manual analysis error:', error)
await logServer({
level: "error",
message: "Manual analysis error",
labels: ["api", "analyze-manual", "error"],
payload: {
message: error?.message,
stack: error?.stack,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/analyze-manual",
});
if (jobId) {
try {

View File

@@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api";
import { z } from 'zod'
import { scrapeWebsite, ScrapingError } from '@/lib/scraper'
import { performDeepAnalysis } from '@/lib/analysis-pipeline'
import { logServer } from "@/lib/server-logger";
const bodySchema = z.object({
url: z.string().min(1),
@@ -20,6 +21,7 @@ export async function POST(request: NextRequest) {
detail?: string
}[] = []
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -99,9 +101,26 @@ export async function POST(request: NextRequest) {
)
}
console.log(`🌐 Scraping: ${url}`)
await logServer({
level: "info",
message: "Scraping website",
labels: ["api", "analyze", "scrape"],
payload: { url },
requestId,
source: "api/analyze",
});
const scrapedContent = await scrapeWebsite(url)
console.log(` ✓ Scraped ${scrapedContent.headings.length} headings, ${scrapedContent.paragraphs.length} paragraphs`)
await logServer({
level: "info",
message: "Scrape complete",
labels: ["api", "analyze", "scrape"],
payload: {
headings: scrapedContent.headings.length,
paragraphs: scrapedContent.paragraphs.length,
},
requestId,
source: "api/analyze",
});
if (jobId) {
await updateTimeline({
key: "scrape",
@@ -111,7 +130,13 @@ export async function POST(request: NextRequest) {
})
}
console.log('🤖 Starting enhanced analysis...')
await logServer({
level: "info",
message: "Starting enhanced analysis",
labels: ["api", "analyze", "analysis"],
requestId,
source: "api/analyze",
});
const progressMap: Record<string, number> = {
features: 35,
competitors: 50,
@@ -128,7 +153,18 @@ export async function POST(request: NextRequest) {
progress: progressMap[update.key] ?? 80,
})
})
console.log(` ✓ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords, ${analysis.dorkQueries.length} queries`)
await logServer({
level: "info",
message: "Analysis complete",
labels: ["api", "analyze", "analysis"],
payload: {
features: analysis.features.length,
keywords: analysis.keywords.length,
dorkQueries: analysis.dorkQueries.length,
},
requestId,
source: "api/analyze",
});
if (jobId) {
await updateTimeline({
key: "finalize",
@@ -186,7 +222,14 @@ export async function POST(request: NextRequest) {
persisted = true
}
} catch (persistError) {
console.error("Failed to persist analysis:", persistError)
await logServer({
level: "error",
message: "Failed to persist analysis",
labels: ["api", "analyze", "persist", "error"],
payload: { error: String(persistError) },
requestId,
source: "api/analyze",
});
}
}
@@ -205,7 +248,17 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Analysis error:', error)
await logServer({
level: "error",
message: "Analysis error",
labels: ["api", "analyze", "error"],
payload: {
message: error?.message,
stack: error?.stack,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/analyze",
});
if (jobId) {
try {

View File

@@ -6,6 +6,7 @@ import { z } from 'zod'
import { generateSearchQueries, getDefaultPlatforms } from '@/lib/query-generator'
import { executeSearches, scoreOpportunities } from '@/lib/search-executor'
import type { EnhancedProductAnalysis, SearchConfig, PlatformConfig } from '@/lib/types'
import { logServer } from "@/lib/server-logger";
const searchSchema = z.object({
projectId: z.string(),
@@ -23,12 +24,15 @@ const searchSchema = z.object({
})),
strategies: z.array(z.string()),
maxResults: z.number().default(50)
minAgeDays: z.number().min(0).max(365).optional(),
maxAgeDays: z.number().min(0).max(365).optional()
})
})
export async function POST(request: NextRequest) {
let jobId: string | undefined
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -41,9 +45,21 @@ export async function POST(request: NextRequest) {
const parsed = searchSchema.parse(body)
const { projectId, config } = parsed
jobId = parsed.jobId
const ageFilters = {
minAgeDays: config.minAgeDays,
maxAgeDays: config.maxAgeDays,
}
if (!process.env.SERPER_API_KEY) {
const errorMessage = "SERPER_API_KEY is not configured. Add it to your environment to run searches."
await logServer({
level: "warn",
message: "Serper API key missing",
labels: ["api", "opportunities", "config", "warn"],
payload: { projectId },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -84,19 +100,43 @@ export async function POST(request: NextRequest) {
const analysis = searchContext.context as EnhancedProductAnalysis
console.log('🔍 Starting opportunity search...')
console.log(` Product: ${analysis.productName}`)
console.log(` Platforms: ${config.platforms.filter(p => p.enabled).map(p => p.name).join(', ')}`)
console.log(` Strategies: ${config.strategies.join(', ')}`)
await logServer({
level: "info",
message: "Starting opportunity search",
labels: ["api", "opportunities", "start"],
payload: {
projectId,
productName: analysis.productName,
platforms: config.platforms.filter((p) => p.enabled).map((p) => p.name),
strategies: config.strategies,
filters: ageFilters,
},
requestId,
source: "api/opportunities",
});
// Generate queries
console.log(' Generating search queries...')
await logServer({
level: "info",
message: "Generating search queries",
labels: ["api", "opportunities", "queries"],
payload: { projectId },
requestId,
source: "api/opportunities",
});
const enforcedConfig: SearchConfig = {
...(config as SearchConfig),
maxResults: Math.min((config as SearchConfig).maxResults || 50, 50),
}
const queries = generateSearchQueries(analysis as EnhancedProductAnalysis, enforcedConfig)
console.log(` ✓ Generated ${queries.length} queries`)
await logServer({
level: "info",
message: "Generated search queries",
labels: ["api", "opportunities", "queries"],
payload: { projectId, count: queries.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -106,9 +146,23 @@ export async function POST(request: NextRequest) {
}
// Execute searches
console.log(' Executing searches...')
const searchResults = await executeSearches(queries)
console.log(` ✓ Found ${searchResults.length} raw results`)
await logServer({
level: "info",
message: "Executing searches",
labels: ["api", "opportunities", "search"],
payload: { projectId, queryCount: queries.length },
requestId,
source: "api/opportunities",
});
const searchResults = await executeSearches(queries, ageFilters)
await logServer({
level: "info",
message: "Searches complete",
labels: ["api", "opportunities", "search"],
payload: { projectId, rawResults: searchResults.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -130,9 +184,23 @@ export async function POST(request: NextRequest) {
const filteredResults = searchResults.filter((result) => !existingSet.has(result.url))
// Score and rank
console.log(' Scoring opportunities...')
await logServer({
level: "info",
message: "Scoring opportunities",
labels: ["api", "opportunities", "score"],
payload: { projectId, candidateResults: filteredResults.length },
requestId,
source: "api/opportunities",
});
const opportunities = scoreOpportunities(filteredResults, analysis as EnhancedProductAnalysis)
console.log(` ✓ Scored ${opportunities.length} opportunities`)
await logServer({
level: "info",
message: "Opportunities scored",
labels: ["api", "opportunities", "score"],
payload: { projectId, scored: opportunities.length },
requestId,
source: "api/opportunities",
});
if (jobId) {
await fetchMutation(
api.searchJobs.update,
@@ -179,7 +247,14 @@ export async function POST(request: NextRequest) {
} catch (error: any) {
const errorMessage =
error instanceof Error ? error.message : typeof error === "string" ? error : "Search failed"
console.error("❌ Opportunity search error:", errorMessage)
await logServer({
level: "error",
message: "Opportunity search error",
labels: ["api", "opportunities", "error"],
payload: { message: errorMessage },
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/opportunities",
});
if (jobId) {
try {

View File

@@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server'
import { isAuthenticatedNextjs } from "@convex-dev/auth/nextjs/server";
import { z } from 'zod'
import type { EnhancedProductAnalysis, Opportunity, DorkQuery } from '@/lib/types'
import { logServer } from "@/lib/server-logger";
import { appendSerperAgeModifiers, SerperAgeFilter } from "@/lib/serper-date-filters";
// Search result from any source
interface SearchResult {
@@ -31,11 +33,14 @@ const bodySchema = z.object({
problem: z.string(),
searchTerms: z.array(z.string())
}))
})
}),
minAgeDays: z.number().min(0).max(365).optional(),
maxAgeDays: z.number().min(0).max(365).optional(),
})
export async function POST(request: NextRequest) {
try {
const requestId = request.headers.get("x-request-id") ?? undefined;
if (!(await isAuthenticatedNextjs())) {
const redirectUrl = new URL("/auth", request.url);
const referer = request.headers.get("referer");
@@ -45,16 +50,34 @@ export async function POST(request: NextRequest) {
}
const body = await request.json()
const { analysis } = bodySchema.parse(body)
const { analysis, minAgeDays, maxAgeDays } = bodySchema.parse(body)
const ageFilters: SerperAgeFilter = {
minAgeDays,
maxAgeDays,
}
if (!process.env.SERPER_API_KEY) {
await logServer({
level: "warn",
message: "Serper API key missing",
labels: ["api", "search", "config", "warn"],
requestId,
source: "api/search",
});
return NextResponse.json(
{ error: 'SERPER_API_KEY is not configured. Add it to your environment to run searches.' },
{ status: 400 }
)
}
console.log(`🔍 Finding opportunities for: ${analysis.productName}`)
await logServer({
level: "info",
message: "Finding opportunities",
labels: ["api", "search", "start"],
payload: { productName: analysis.productName, filters: ageFilters },
requestId,
source: "api/search",
});
// Sort queries by priority
const sortedQueries = analysis.dorkQueries
@@ -69,22 +92,50 @@ export async function POST(request: NextRequest) {
// Execute searches
for (const query of sortedQueries) {
try {
console.log(` Searching: ${query.query.substring(0, 60)}...`)
const results = await searchGoogle(query.query, 5)
await logServer({
level: "info",
message: "Searching query",
labels: ["api", "search", "query"],
payload: { query: query.query, platform: query.platform },
requestId,
source: "api/search",
});
const results = await searchGoogle(query.query, 5, ageFilters, requestId)
allResults.push(...results)
// Small delay to avoid rate limiting
await new Promise(r => setTimeout(r, 500))
} catch (e) {
console.error(` Search failed for query: ${query.query.substring(0, 40)}`)
await logServer({
level: "error",
message: "Search failed for query",
labels: ["api", "search", "query", "error"],
payload: { query: query.query, error: String(e) },
requestId,
source: "api/search",
});
}
}
console.log(` Found ${allResults.length} raw results`)
await logServer({
level: "info",
message: "Search complete",
labels: ["api", "search", "results"],
payload: { rawResults: allResults.length },
requestId,
source: "api/search",
});
// Analyze and score opportunities
const opportunities = await analyzeOpportunities(allResults, analysis as EnhancedProductAnalysis)
console.log(` ✓ Analyzed ${opportunities.length} opportunities`)
await logServer({
level: "info",
message: "Opportunities analyzed",
labels: ["api", "search", "analyze"],
payload: { analyzed: opportunities.length },
requestId,
source: "api/search",
});
return NextResponse.json({
success: true,
@@ -100,7 +151,18 @@ export async function POST(request: NextRequest) {
})
} catch (error: any) {
console.error('❌ Search error:', error)
await logServer({
level: "error",
message: "Search error",
labels: ["api", "search", "error"],
payload: {
message: error?.message,
stack: error?.stack,
filters: ageFilters,
},
requestId: request.headers.get("x-request-id") ?? undefined,
source: "api/search",
});
return NextResponse.json(
{ error: error.message || 'Failed to find opportunities' },
@@ -109,23 +171,42 @@ export async function POST(request: NextRequest) {
}
}
async function searchGoogle(query: string, num: number): Promise<SearchResult[]> {
return searchSerper(query, num)
async function searchGoogle(
query: string,
num: number,
filters?: SerperAgeFilter,
requestId?: string
): Promise<SearchResult[]> {
return searchSerper(query, num, filters, requestId)
}
async function searchSerper(query: string, num: number): Promise<SearchResult[]> {
async function searchSerper(
query: string,
num: number,
filters?: SerperAgeFilter,
requestId?: string
): Promise<SearchResult[]> {
const filteredQuery = appendSerperAgeModifiers(query, filters)
const response = await fetch('https://google.serper.dev/search', {
method: 'POST',
headers: {
'X-API-KEY': process.env.SERPER_API_KEY!,
'Content-Type': 'application/json'
},
body: JSON.stringify({ q: query, num })
body: JSON.stringify({ q: filteredQuery, num })
})
if (!response.ok) throw new Error('Serper API error')
const data = await response.json()
await logServer({
level: "info",
message: "Serper response received",
labels: ["api", "search", "serper", "response"],
payload: { query: filteredQuery, num, filters, data },
requestId,
source: "api/search",
});
return (data.organic || []).map((r: any) => ({
title: r.title,
url: r.link,

View File

@@ -209,6 +209,8 @@ export default function OpportunitiesPage() {
'competitor-alternative'
])
const [maxQueries, setMaxQueries] = useState(50)
const [minAgeDays, setMinAgeDays] = useState(0)
const [maxAgeDays, setMaxAgeDays] = useState(30)
const [goalPreset, setGoalPreset] = useState<string>('high-intent')
const [isSearching, setIsSearching] = useState(false)
const [opportunities, setOpportunities] = useState<Opportunity[]>([])
@@ -351,6 +353,14 @@ export default function OpportunitiesPage() {
}))
)
}
if (typeof parsed.minAgeDays === 'number' || typeof parsed.maxAgeDays === 'number') {
const rawMin = typeof parsed.minAgeDays === 'number' ? Math.max(parsed.minAgeDays, 0) : 0
const rawMax = typeof parsed.maxAgeDays === 'number' ? Math.max(parsed.maxAgeDays, 0) : 30
const normalizedMin = Math.min(rawMin, rawMax)
const normalizedMax = Math.max(rawMax, normalizedMin)
setMinAgeDays(normalizedMin)
setMaxAgeDays(normalizedMax)
}
} catch {
// Ignore invalid cached config.
}
@@ -380,9 +390,11 @@ export default function OpportunitiesPage() {
strategies,
maxQueries,
platforms,
minAgeDays,
maxAgeDays,
}
localStorage.setItem(key, JSON.stringify(payload))
}, [selectedProjectId, goalPreset, strategies, maxQueries, platforms])
}, [selectedProjectId, goalPreset, strategies, maxQueries, platforms, minAgeDays, maxAgeDays])
useEffect(() => {
if (!analysis && latestAnalysis === null) {
@@ -483,7 +495,9 @@ export default function OpportunitiesPage() {
searchTemplate: platform.searchTemplate ?? "",
})),
strategies,
maxResults: Math.min(maxQueries, 50)
maxResults: Math.min(maxQueries, 50),
minAgeDays: minAgeDays > 0 ? minAgeDays : undefined,
maxAgeDays: maxAgeDays > 0 ? maxAgeDays : undefined,
}
setLastSearchConfig(config as SearchConfig)
@@ -769,6 +783,9 @@ export default function OpportunitiesPage() {
<span>·</span>
<span>max {maxQueries} queries</span>
</div>
<div className="text-xs text-muted-foreground">
Lead age window: {minAgeDays === 0 ? 'newest' : `${minAgeDays}+ days old`} {maxAgeDays > 0 ? `up to ${maxAgeDays} days` : 'any age'}
</div>
{platforms.filter(p => p.enabled).length === 0 && (
<p className="text-xs text-muted-foreground">Select at least one source to search.</p>
)}
@@ -835,6 +852,42 @@ export default function OpportunitiesPage() {
</div>
))}
</div>
<div className="space-y-3">
<Label className="text-sm font-medium uppercase text-muted-foreground">Lead freshness</Label>
<p className="text-xs text-muted-foreground">
Restrict opportunities by lead age. Set a maximum age to avoid archived threads and an optional minimum age to skip brand-new posts.
</p>
<div className="space-y-2">
<div className="flex items-center justify-between text-xs text-muted-foreground">
<span>Min age (older than)</span>
<span>{minAgeDays} day{minAgeDays === 1 ? '' : 's'}</span>
</div>
<Slider
value={[minAgeDays]}
onValueChange={([value]) => {
const limited = maxAgeDays > 0 ? Math.min(value, maxAgeDays) : value
setMinAgeDays(limited)
}}
min={0}
max={365}
step={1}
/>
<div className="flex items-center justify-between text-xs text-muted-foreground">
<span>Max age (newer than)</span>
<span>{maxAgeDays > 0 ? `${maxAgeDays} days` : 'Any'}</span>
</div>
<Slider
value={[maxAgeDays]}
onValueChange={([value]) => {
const nextMax = Math.max(value, minAgeDays)
setMaxAgeDays(nextMax)
}}
min={0}
max={365}
step={1}
/>
</div>
</div>
</div>
</ScrollArea>
</DialogContent>

View File

@@ -14,6 +14,7 @@ import type * as analysisSections from "../analysisSections.js";
import type * as auth from "../auth.js";
import type * as dataSources from "../dataSources.js";
import type * as http from "../http.js";
import type * as logs from "../logs.js";
import type * as opportunities from "../opportunities.js";
import type * as projects from "../projects.js";
import type * as searchJobs from "../searchJobs.js";
@@ -33,6 +34,7 @@ declare const fullApi: ApiFromModules<{
auth: typeof auth;
dataSources: typeof dataSources;
http: typeof http;
logs: typeof logs;
opportunities: typeof opportunities;
projects: typeof projects;
searchJobs: typeof searchJobs;

32
convex/logs.ts Normal file
View File

@@ -0,0 +1,32 @@
import { mutation } from "./_generated/server";
import { v } from "convex/values";
import { getAuthUserId } from "@convex-dev/auth/server";
/**
 * Persist a single log entry into the `logs` table.
 *
 * The entry is attributed to the currently authenticated user when one is
 * available; anonymous entries are stored without a `userId`. A server-side
 * `createdAt` timestamp is always attached.
 */
export const createLog = mutation({
  args: {
    level: v.union(
      v.literal("debug"),
      v.literal("info"),
      v.literal("warn"),
      v.literal("error")
    ),
    message: v.string(),
    labels: v.array(v.string()),
    payload: v.optional(v.any()),
    source: v.optional(v.string()),
    requestId: v.optional(v.string()),
    projectId: v.optional(v.id("projects")),
  },
  handler: async (ctx, args) => {
    // Resolve the caller's identity (undefined/null when unauthenticated).
    const userId = await getAuthUserId(ctx);
    // Only include `userId` when a user is signed in, mirroring the
    // optional `userId` column in the schema.
    const entry = userId
      ? { ...args, createdAt: Date.now(), userId }
      : { ...args, createdAt: Date.now() };
    await ctx.db.insert("logs", entry);
  },
});

View File

@@ -221,6 +221,24 @@ const schema = defineSchema({
})
.index("by_project_status", ["projectId", "status"])
.index("by_project_createdAt", ["projectId", "createdAt"]),
logs: defineTable({
level: v.union(
v.literal("debug"),
v.literal("info"),
v.literal("warn"),
v.literal("error")
),
message: v.string(),
labels: v.array(v.string()),
payload: v.optional(v.any()),
source: v.optional(v.string()),
requestId: v.optional(v.string()),
projectId: v.optional(v.id("projects")),
userId: v.optional(v.id("users")),
createdAt: v.number(),
})
.index("by_createdAt", ["createdAt"])
.index("by_project_createdAt", ["projectId", "createdAt"]),
});
export default schema;

View File

@@ -10,6 +10,7 @@ import type {
Competitor,
DorkQuery
} from './types'
import { logServer } from "@/lib/server-logger";
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY
@@ -46,7 +47,13 @@ async function aiGenerate<T>(prompt: string, systemPrompt: string, temperature:
try { return JSON.parse(jsonStr) as T }
catch (e) {
console.error('Failed to parse JSON:', jsonStr.substring(0, 200))
await logServer({
level: "error",
message: "Failed to parse JSON from AI response",
labels: ["analysis-pipeline", "ai", "error"],
payload: { sample: jsonStr.substring(0, 200) },
source: "lib/analysis-pipeline",
});
throw new Error('Invalid JSON response from AI')
}
}
@@ -490,60 +497,149 @@ export async function performDeepAnalysis(
content: ScrapedContent,
onProgress?: (update: AnalysisProgressUpdate) => void | Promise<void>
): Promise<EnhancedProductAnalysis> {
console.log('🔍 Starting deep analysis...')
await logServer({
level: "info",
message: "Starting deep analysis",
labels: ["analysis-pipeline", "start"],
source: "lib/analysis-pipeline",
});
console.log(' 🧭 Product profiling...')
await logServer({
level: "info",
message: "Product profiling",
labels: ["analysis-pipeline", "profile"],
source: "lib/analysis-pipeline",
});
const productProfile = await extractProductProfile(content)
console.log(` ✓ Profiled as ${productProfile.category} for ${productProfile.targetPersona} (conf ${productProfile.confidence})`)
await logServer({
level: "info",
message: "Product profile complete",
labels: ["analysis-pipeline", "profile"],
payload: {
category: productProfile.category,
targetPersona: productProfile.targetPersona,
confidence: productProfile.confidence,
},
source: "lib/analysis-pipeline",
});
console.log(' 📦 Pass 1: Features...')
await logServer({
level: "info",
message: "Pass 1: Features",
labels: ["analysis-pipeline", "features"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "features", status: "running" })
const features = await extractFeatures(content)
console.log(`${features.length} features`)
await logServer({
level: "info",
message: "Features extracted",
labels: ["analysis-pipeline", "features"],
payload: { count: features.length },
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "features", status: "completed", detail: `${features.length} features` })
console.log(' 🏆 Pass 2: Competitors...')
await logServer({
level: "info",
message: "Pass 2: Competitors",
labels: ["analysis-pipeline", "competitors"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "competitors", status: "running" })
const candidateSet = await generateCompetitorCandidates(productProfile)
const competitors = await selectDirectCompetitors(productProfile, candidateSet.candidates)
console.log(`${competitors.length} competitors: ${competitors.map(c => c.name).join(', ')}`)
await logServer({
level: "info",
message: "Competitors extracted",
labels: ["analysis-pipeline", "competitors"],
payload: { count: competitors.length, names: competitors.map((c) => c.name) },
source: "lib/analysis-pipeline",
});
await onProgress?.({
key: "competitors",
status: "completed",
detail: `${competitors.length} competitors: ${competitors.map(c => c.name).join(', ')}`
})
console.log(' 🔑 Pass 3: Keywords...')
await logServer({
level: "info",
message: "Pass 3: Keywords",
labels: ["analysis-pipeline", "keywords"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "keywords", status: "running" })
const keywords = await generateKeywords(features, content, competitors)
console.log(`${keywords.length} keywords (${keywords.filter(k => k.type === 'differentiator').length} differentiators)`)
await logServer({
level: "info",
message: "Keywords extracted",
labels: ["analysis-pipeline", "keywords"],
payload: {
count: keywords.length,
differentiators: keywords.filter((k) => k.type === "differentiator").length,
},
source: "lib/analysis-pipeline",
});
await onProgress?.({
key: "keywords",
status: "completed",
detail: `${keywords.length} keywords (${keywords.filter(k => k.type === 'differentiator').length} differentiators)`
})
console.log(' 🎯 Pass 4: Problems...')
await logServer({
level: "info",
message: "Pass 4: Problems",
labels: ["analysis-pipeline", "problems"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "problems", status: "running" })
const problems = await identifyProblems(features, content)
const personas = await generatePersonas(content, problems)
console.log(`${problems.length} problems, ${personas.length} personas`)
await logServer({
level: "info",
message: "Problems and personas extracted",
labels: ["analysis-pipeline", "problems"],
payload: { problems: problems.length, personas: personas.length },
source: "lib/analysis-pipeline",
});
await onProgress?.({
key: "problems",
status: "completed",
detail: `${problems.length} problems, ${personas.length} personas`
})
console.log(' 💡 Pass 5: Use cases...')
await logServer({
level: "info",
message: "Pass 5: Use cases",
labels: ["analysis-pipeline", "use-cases"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "useCases", status: "running" })
const useCases = await generateUseCases(features, personas, problems)
console.log(`${useCases.length} use cases`)
await logServer({
level: "info",
message: "Use cases extracted",
labels: ["analysis-pipeline", "use-cases"],
payload: { count: useCases.length },
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "useCases", status: "completed", detail: `${useCases.length} use cases` })
console.log(' 🔎 Pass 6: Dork queries...')
await logServer({
level: "info",
message: "Pass 6: Dork queries",
labels: ["analysis-pipeline", "dork-queries"],
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "dorkQueries", status: "running" })
const dorkQueries = generateDorkQueries(keywords, problems, useCases, competitors)
console.log(`${dorkQueries.length} queries`)
await logServer({
level: "info",
message: "Dork queries extracted",
labels: ["analysis-pipeline", "dork-queries"],
payload: { count: dorkQueries.length },
source: "lib/analysis-pipeline",
});
await onProgress?.({ key: "dorkQueries", status: "completed", detail: `${dorkQueries.length} queries` })
const productName = content.title.split(/[\|\-–—:]/)[0].trim()

View File

@@ -1,5 +1,6 @@
import OpenAI from 'openai'
import type { ProductAnalysis, ScrapedContent, Opportunity } from './types'
import { logServer } from "@/lib/server-logger";
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY
@@ -111,7 +112,13 @@ export async function findOpportunities(analysis: ProductAnalysis): Promise<Oppo
}
}
} catch (e) {
console.error('Search failed:', e)
await logServer({
level: "error",
message: "Search failed",
labels: ["openai", "search", "error"],
payload: { error: String(e) },
source: "lib/openai",
});
}
}
@@ -213,7 +220,13 @@ async function searchGoogle(query: string, num: number): Promise<SearchResult[]>
const results = await searchSerper(query, num)
if (results.length > 0) return results
} catch (e) {
console.error('Serper search failed:', e)
await logServer({
level: "error",
message: "Serper search failed",
labels: ["openai", "serper", "error"],
payload: { error: String(e) },
source: "lib/openai",
});
}
}
@@ -234,6 +247,13 @@ async function searchSerper(query: string, num: number): Promise<SearchResult[]>
if (!response.ok) throw new Error('Serper API error')
const data = await response.json()
await logServer({
level: "info",
message: "Serper response received",
labels: ["openai", "serper", "response"],
payload: { query, num, data },
source: "lib/openai",
});
return (data.organic || []).map((r: any) => ({
title: r.title,
url: r.link,

View File

@@ -5,6 +5,7 @@ import type {
SearchStrategy,
PlatformId
} from './types'
import { logServer } from "@/lib/server-logger";
export function getDefaultPlatforms(): Record<PlatformId, { name: string; icon: string; rateLimit: number; enabled: boolean; searchTemplate: string }> {
return {
@@ -77,9 +78,13 @@ export function generateSearchQueries(
const deduped = sortAndDedupeQueries(queries)
const limited = deduped.slice(0, config.maxResults || 50)
console.info(
`[opportunities] queries: generated=${queries.length} deduped=${deduped.length} limited=${limited.length}`
)
void logServer({
level: "info",
message: "Search queries generated",
labels: ["query-generator", "queries"],
payload: { generated: queries.length, deduped: deduped.length, limited: limited.length },
source: "lib/query-generator",
});
return limited
}

View File

@@ -1,5 +1,6 @@
import puppeteer from 'puppeteer'
import type { ScrapedContent } from './types'
import { logServer } from "@/lib/server-logger";
export class ScrapingError extends Error {
constructor(message: string, public code: string) {
@@ -94,7 +95,13 @@ export async function scrapeWebsite(url: string): Promise<ScrapedContent> {
}
} catch (error: any) {
console.error('Scraping error:', error)
await logServer({
level: "error",
message: "Scraping error",
labels: ["scraper", "error"],
payload: { url: validatedUrl, error: String(error) },
source: "lib/scraper",
});
if (error.message?.includes('ERR_NAME_NOT_RESOLVED') || error.message?.includes('net::ERR')) {
throw new ScrapingError(

View File

@@ -1,4 +1,6 @@
import type { GeneratedQuery, Opportunity, EnhancedProductAnalysis } from './types'
import { logServer } from "@/lib/server-logger";
import { appendSerperAgeModifiers, SerperAgeFilter } from "@/lib/serper-date-filters";
interface SearchResult {
title: string
@@ -10,6 +12,7 @@ interface SearchResult {
export async function executeSearches(
queries: GeneratedQuery[],
filters?: SerperAgeFilter,
onProgress?: (progress: { current: number; total: number; platform: string }) => void
): Promise<SearchResult[]> {
const results: SearchResult[] = []
@@ -24,11 +27,17 @@ export async function executeSearches(
let completed = 0
for (const [platform, platformQueries] of byPlatform) {
console.log(`Searching ${platform}: ${platformQueries.length} queries`)
await logServer({
level: "info",
message: "Searching platform",
labels: ["search-executor", "platform", "start"],
payload: { platform, queries: platformQueries.length },
source: "lib/search-executor",
});
for (const query of platformQueries) {
try {
const searchResults = await executeSingleSearch(query)
const searchResults = await executeSingleSearch(query, filters)
results.push(...searchResults)
completed++
@@ -37,7 +46,13 @@ export async function executeSearches(
// Rate limiting - 1 second between requests
await delay(1000)
} catch (err) {
console.error(`Search failed for ${platform}:`, err)
await logServer({
level: "error",
message: "Search failed for platform",
labels: ["search-executor", "platform", "error"],
payload: { platform, error: String(err) },
source: "lib/search-executor",
});
}
}
}
@@ -45,15 +60,15 @@ export async function executeSearches(
return results
}
async function executeSingleSearch(query: GeneratedQuery): Promise<SearchResult[]> {
async function executeSingleSearch(query: GeneratedQuery, filters?: SerperAgeFilter): Promise<SearchResult[]> {
if (!process.env.SERPER_API_KEY) {
throw new Error('SERPER_API_KEY is not configured.')
}
return searchWithSerper(query)
return searchWithSerper(query, filters)
}
async function searchWithSerper(query: GeneratedQuery): Promise<SearchResult[]> {
async function searchWithSerper(query: GeneratedQuery, filters?: SerperAgeFilter): Promise<SearchResult[]> {
const response = await fetch('https://google.serper.dev/search', {
method: 'POST',
headers: {
@@ -61,7 +76,7 @@ async function searchWithSerper(query: GeneratedQuery): Promise<SearchResult[]>
'Content-Type': 'application/json'
},
body: JSON.stringify({
q: query.query,
q: appendSerperAgeModifiers(query.query, filters),
num: 5,
gl: 'us',
hl: 'en'
@@ -73,6 +88,13 @@ async function searchWithSerper(query: GeneratedQuery): Promise<SearchResult[]>
}
const data = await response.json()
await logServer({
level: "info",
message: "Serper response received",
labels: ["search-executor", "serper", "response"],
payload: { query: query.query, platform: query.platform, data },
source: "lib/search-executor",
});
return (data.organic || []).map((r: any) => ({
title: r.title,

View File

@@ -0,0 +1,28 @@
export type SerperAgeFilter = {
  // Only include results newer than this many days (emits `newer_than:<n>d`).
  maxAgeDays?: number
  // Only include results older than this many days (emits `older_than:<n>d`).
  minAgeDays?: number
}

// Coerce an optional day count into a positive whole number of days.
// Returns undefined for missing, non-numeric, non-finite, zero/negative,
// or sub-one-day values so callers never emit a degenerate `:0d` modifier.
const normalizeDays = (value?: number) => {
  if (typeof value !== 'number') return undefined
  if (!Number.isFinite(value)) return undefined
  const days = Math.floor(value)
  // Bug fix: previously only the raw value was checked (`value <= 0`), so a
  // fractional value in (0, 1) floored to 0 and produced `newer_than:0d`.
  return days > 0 ? days : undefined
}

/**
 * Append age-based search modifiers to a query string.
 *
 * @param query   The base search query; returned unchanged when no valid
 *                filter values are present.
 * @param filters Optional min/max result-age bounds in days.
 * @returns The query with `newer_than:<n>d` / `older_than:<n>d` appended,
 *          separated by single spaces.
 */
export function appendSerperAgeModifiers(query: string, filters?: SerperAgeFilter): string {
  if (!filters) return query

  const modifiers: string[] = []
  const newerThan = normalizeDays(filters.maxAgeDays)
  const olderThan = normalizeDays(filters.minAgeDays)

  if (typeof newerThan === 'number') {
    modifiers.push(`newer_than:${newerThan}d`)
  }
  if (typeof olderThan === 'number') {
    modifiers.push(`older_than:${olderThan}d`)
  }

  if (modifiers.length === 0) return query
  return `${query} ${modifiers.join(' ')}`
}

57
lib/server-logger.ts Normal file
View File

@@ -0,0 +1,57 @@
import { fetchMutation } from "convex/nextjs";
import { api } from "@/convex/_generated/api";
import type { Id } from "@/convex/_generated/dataModel";
// Severity levels accepted by the logger; routed to the matching console
// method by writeConsole (debug/info both use console.log).
type LogLevel = "debug" | "info" | "warn" | "error";
// A single structured log entry to mirror to the console and persist via
// the Convex `logs.createLog` mutation.
type LogParams = {
  level: LogLevel;
  message: string;
  // Free-form tags for filtering/grouping, e.g. ["scraper", "error"].
  labels: string[];
  // Arbitrary structured context passed through to the mutation as-is.
  payload?: unknown;
  // Module path of the caller, e.g. "lib/scraper".
  source?: string;
  // Correlation id propagated from the incoming request, when available.
  requestId?: string;
  // Optional project the entry belongs to.
  projectId?: Id<"projects">;
  // Convex auth token used only to authenticate the mutation call; it is
  // destructured out in logServer and never stored with the entry.
  token?: string;
};
// Mirror a log entry to the local console, choosing the console method that
// matches the severity. The payload argument is appended only when one was
// actually provided, so entries without context stay to a clean two-part line.
function writeConsole(level: LogLevel, message: string, payload?: unknown) {
  const tag = `[${level}]`;
  const parts: unknown[] =
    payload === undefined ? [tag, message] : [tag, message, payload];
  if (level === "error") {
    console.error(...parts);
  } else if (level === "warn") {
    console.warn(...parts);
  } else {
    // "debug" and "info" both go through console.log.
    console.log(...parts);
  }
}
/**
 * Record a structured log entry: echo it to the local console, then persist
 * it through the Convex `logs.createLog` mutation. The auth `token`, when
 * present, only authenticates the mutation call and is stripped from the
 * stored entry. Persistence failures are caught and reported locally so
 * logging can never crash the caller.
 */
export async function logServer({ token, ...entry }: LogParams): Promise<void> {
  // Echo locally first so the entry is visible even if the remote write fails.
  writeConsole(entry.level, entry.message, entry.payload);
  try {
    const pending = token
      ? fetchMutation(api.logs.createLog, entry, { token })
      : fetchMutation(api.logs.createLog, entry);
    await pending;
  } catch (error) {
    // Best-effort persistence: swallow the failure after noting it.
    console.error("[logger] Failed to write log", error);
  }
}

View File

@@ -26,6 +26,8 @@ export interface SearchConfig {
platforms: PlatformConfig[]
strategies: SearchStrategy[]
maxResults: number
minAgeDays?: number
maxAgeDays?: number
timeFilter?: 'past-day' | 'past-week' | 'past-month' | 'past-year' | 'all'
}

View File

@@ -1,5 +1,6 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
output: 'standalone',
experimental: {
serverComponentsExternalPackages: ['puppeteer']
}