From f1e13f87f6748f54215caf77efb68e4eb1c84d14 Mon Sep 17 00:00:00 2001 From: Matiss Jurevics Date: Wed, 4 Feb 2026 12:51:41 +0000 Subject: [PATCH] dates --- .dockerignore | 12 +++ Dockerfile | 49 ++++++++++++ app/api/analyze-manual/route.ts | 52 +++++++++++-- app/api/analyze/route.ts | 65 ++++++++++++++-- app/api/opportunities/route.ts | 99 +++++++++++++++++++++--- app/api/search/route.ts | 107 ++++++++++++++++++++++---- app/app/(app)/search/page.tsx | 57 +++++++++++++- convex/_generated/api.d.ts | 2 + convex/logs.ts | 32 ++++++++ convex/schema.ts | 18 +++++ lib/analysis-pipeline.ts | 128 ++++++++++++++++++++++++++++---- lib/openai.ts | 24 +++++- lib/query-generator.ts | 11 ++- lib/scraper.ts | 9 ++- lib/search-executor.ts | 36 +++++++-- lib/serper-date-filters.ts | 28 +++++++ lib/server-logger.ts | 57 ++++++++++++++ lib/types.ts | 2 + next.config.js | 1 + 19 files changed, 722 insertions(+), 67 deletions(-) create mode 100644 .dockerignore create mode 100644 Dockerfile create mode 100644 convex/logs.ts create mode 100644 lib/serper-date-filters.ts create mode 100644 lib/server-logger.ts diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..3b0c3f8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,12 @@ +.git +.next +node_modules +npm-debug.log +yarn-error.log +.env +.env.* +.DS_Store +*.log +*.local +docs +scripts diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f3701b2 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,49 @@ +FROM node:20-bookworm-slim AS base +ENV NODE_ENV=production +ENV NEXT_TELEMETRY_DISABLED=1 + +FROM base AS deps +WORKDIR /app +COPY package.json package-lock.json ./ +RUN npm ci + +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . 
+RUN npm run build + +FROM base AS runner +WORKDIR /app +ENV PORT=3000 +ENV PUPPETEER_SKIP_DOWNLOAD=1 +ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium + +RUN apt-get update && apt-get install -y --no-install-recommends \ + chromium \ + fonts-liberation \ + libasound2 \ + libatk1.0-0 \ + libcups2 \ + libdrm2 \ + libxkbcommon0 \ + libxcomposite1 \ + libxdamage1 \ + libxrandr2 \ + libgbm1 \ + libpango-1.0-0 \ + libpangocairo-1.0-0 \ + libgtk-3-0 \ + libnss3 \ + libx11-xcb1 \ + libxss1 \ + libxtst6 \ + libu2f-udev \ + && rm -rf /var/lib/apt/lists/* + +COPY --from=builder /app/public ./public +COPY --from=builder /app/.next/standalone ./ +COPY --from=builder /app/.next/static ./.next/static + +EXPOSE 3000 +CMD ["node", "server.js"] diff --git a/app/api/analyze-manual/route.ts b/app/api/analyze-manual/route.ts index ea1b45a..ec6443d 100644 --- a/app/api/analyze-manual/route.ts +++ b/app/api/analyze-manual/route.ts @@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api"; import { z } from 'zod' import { analyzeFromText } from '@/lib/scraper' import { performDeepAnalysis } from '@/lib/analysis-pipeline' +import { logServer } from "@/lib/server-logger"; const bodySchema = z.object({ productName: z.string().min(1), @@ -22,6 +23,7 @@ export async function POST(request: NextRequest) { detail?: string }[] = [] try { + const requestId = request.headers.get("x-request-id") ?? 
undefined; if (!(await isAuthenticatedNextjs())) { const redirectUrl = new URL("/auth", request.url); const referer = request.headers.get("referer"); @@ -100,7 +102,14 @@ export async function POST(request: NextRequest) { ) } - console.log('๐Ÿ“ Creating content from manual input...') + await logServer({ + level: "info", + message: "Preparing manual input for analysis", + labels: ["api", "analyze-manual", "scrape"], + payload: { productName }, + requestId, + source: "api/analyze-manual", + }); const scrapedContent = await analyzeFromText(productName, description, features) if (jobId) { await updateTimeline({ @@ -111,7 +120,13 @@ export async function POST(request: NextRequest) { }) } - console.log('๐Ÿค– Starting enhanced analysis...') + await logServer({ + level: "info", + message: "Starting enhanced analysis", + labels: ["api", "analyze-manual", "analysis"], + requestId, + source: "api/analyze-manual", + }); const progressMap: Record = { features: 35, competitors: 50, @@ -128,7 +143,17 @@ export async function POST(request: NextRequest) { progress: progressMap[update.key] ?? 
80, }) }) - console.log(` โœ“ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords`) + await logServer({ + level: "info", + message: "Analysis complete", + labels: ["api", "analyze-manual", "analysis"], + payload: { + features: analysis.features.length, + keywords: analysis.keywords.length, + }, + requestId, + source: "api/analyze-manual", + }); if (jobId) { await updateTimeline({ key: "finalize", @@ -186,7 +211,14 @@ export async function POST(request: NextRequest) { persisted = true } } catch (persistError) { - console.error("Failed to persist manual analysis:", persistError) + await logServer({ + level: "error", + message: "Failed to persist manual analysis", + labels: ["api", "analyze-manual", "persist", "error"], + payload: { error: String(persistError) }, + requestId, + source: "api/analyze-manual", + }); } } @@ -205,7 +237,17 @@ export async function POST(request: NextRequest) { }) } catch (error: any) { - console.error('โŒ Manual analysis error:', error) + await logServer({ + level: "error", + message: "Manual analysis error", + labels: ["api", "analyze-manual", "error"], + payload: { + message: error?.message, + stack: error?.stack, + }, + requestId: request.headers.get("x-request-id") ?? undefined, + source: "api/analyze-manual", + }); if (jobId) { try { diff --git a/app/api/analyze/route.ts b/app/api/analyze/route.ts index 5160ea5..e52899c 100644 --- a/app/api/analyze/route.ts +++ b/app/api/analyze/route.ts @@ -5,6 +5,7 @@ import { api } from "@/convex/_generated/api"; import { z } from 'zod' import { scrapeWebsite, ScrapingError } from '@/lib/scraper' import { performDeepAnalysis } from '@/lib/analysis-pipeline' +import { logServer } from "@/lib/server-logger"; const bodySchema = z.object({ url: z.string().min(1), @@ -20,6 +21,7 @@ export async function POST(request: NextRequest) { detail?: string }[] = [] try { + const requestId = request.headers.get("x-request-id") ?? 
undefined; if (!(await isAuthenticatedNextjs())) { const redirectUrl = new URL("/auth", request.url); const referer = request.headers.get("referer"); @@ -99,9 +101,26 @@ export async function POST(request: NextRequest) { ) } - console.log(`๐ŸŒ Scraping: ${url}`) + await logServer({ + level: "info", + message: "Scraping website", + labels: ["api", "analyze", "scrape"], + payload: { url }, + requestId, + source: "api/analyze", + }); const scrapedContent = await scrapeWebsite(url) - console.log(` โœ“ Scraped ${scrapedContent.headings.length} headings, ${scrapedContent.paragraphs.length} paragraphs`) + await logServer({ + level: "info", + message: "Scrape complete", + labels: ["api", "analyze", "scrape"], + payload: { + headings: scrapedContent.headings.length, + paragraphs: scrapedContent.paragraphs.length, + }, + requestId, + source: "api/analyze", + }); if (jobId) { await updateTimeline({ key: "scrape", @@ -111,7 +130,13 @@ export async function POST(request: NextRequest) { }) } - console.log('๐Ÿค– Starting enhanced analysis...') + await logServer({ + level: "info", + message: "Starting enhanced analysis", + labels: ["api", "analyze", "analysis"], + requestId, + source: "api/analyze", + }); const progressMap: Record = { features: 35, competitors: 50, @@ -128,7 +153,18 @@ export async function POST(request: NextRequest) { progress: progressMap[update.key] ?? 
80, }) }) - console.log(` โœ“ Analysis complete: ${analysis.features.length} features, ${analysis.keywords.length} keywords, ${analysis.dorkQueries.length} queries`) + await logServer({ + level: "info", + message: "Analysis complete", + labels: ["api", "analyze", "analysis"], + payload: { + features: analysis.features.length, + keywords: analysis.keywords.length, + dorkQueries: analysis.dorkQueries.length, + }, + requestId, + source: "api/analyze", + }); if (jobId) { await updateTimeline({ key: "finalize", @@ -186,7 +222,14 @@ export async function POST(request: NextRequest) { persisted = true } } catch (persistError) { - console.error("Failed to persist analysis:", persistError) + await logServer({ + level: "error", + message: "Failed to persist analysis", + labels: ["api", "analyze", "persist", "error"], + payload: { error: String(persistError) }, + requestId, + source: "api/analyze", + }); } } @@ -205,7 +248,17 @@ export async function POST(request: NextRequest) { }) } catch (error: any) { - console.error('โŒ Analysis error:', error) + await logServer({ + level: "error", + message: "Analysis error", + labels: ["api", "analyze", "error"], + payload: { + message: error?.message, + stack: error?.stack, + }, + requestId: request.headers.get("x-request-id") ?? 
undefined, + source: "api/analyze", + }); if (jobId) { try { diff --git a/app/api/opportunities/route.ts index aa7ef07..23761fa 100644 --- a/app/api/opportunities/route.ts +++ b/app/api/opportunities/route.ts @@ -6,6 +6,7 @@ import { z } from 'zod' import { generateSearchQueries, getDefaultPlatforms } from '@/lib/query-generator' import { executeSearches, scoreOpportunities } from '@/lib/search-executor' import type { EnhancedProductAnalysis, SearchConfig, PlatformConfig } from '@/lib/types' +import { logServer } from "@/lib/server-logger"; const searchSchema = z.object({ projectId: z.string(), @@ -23,12 +24,15 @@ const searchSchema = z.object({ })), strategies: z.array(z.string()), - maxResults: z.number().default(50) + maxResults: z.number().default(50), + minAgeDays: z.number().min(0).max(365).optional(), + maxAgeDays: z.number().min(0).max(365).optional() }) }) export async function POST(request: NextRequest) { let jobId: string | undefined try { + const requestId = request.headers.get("x-request-id") ?? undefined; if (!(await isAuthenticatedNextjs())) { const redirectUrl = new URL("/auth", request.url); const referer = request.headers.get("referer"); @@ -41,9 +45,21 @@ export async function POST(request: NextRequest) { const parsed = searchSchema.parse(body) const { projectId, config } = parsed jobId = parsed.jobId + const ageFilters = { + minAgeDays: config.minAgeDays, + maxAgeDays: config.maxAgeDays, + } if (!process.env.SERPER_API_KEY) { const errorMessage = "SERPER_API_KEY is not configured. Add it to your environment to run searches." 
+ await logServer({ + level: "warn", + message: "Serper API key missing", + labels: ["api", "opportunities", "config", "warn"], + payload: { projectId }, + requestId, + source: "api/opportunities", + }); if (jobId) { await fetchMutation( api.searchJobs.update, @@ -84,19 +100,43 @@ export async function POST(request: NextRequest) { const analysis = searchContext.context as EnhancedProductAnalysis - console.log('๐Ÿ” Starting opportunity search...') - console.log(` Product: ${analysis.productName}`) - console.log(` Platforms: ${config.platforms.filter(p => p.enabled).map(p => p.name).join(', ')}`) - console.log(` Strategies: ${config.strategies.join(', ')}`) + await logServer({ + level: "info", + message: "Starting opportunity search", + labels: ["api", "opportunities", "start"], + payload: { + projectId, + productName: analysis.productName, + platforms: config.platforms.filter((p) => p.enabled).map((p) => p.name), + strategies: config.strategies, + filters: ageFilters, + }, + requestId, + source: "api/opportunities", + }); // Generate queries - console.log(' Generating search queries...') + await logServer({ + level: "info", + message: "Generating search queries", + labels: ["api", "opportunities", "queries"], + payload: { projectId }, + requestId, + source: "api/opportunities", + }); const enforcedConfig: SearchConfig = { ...(config as SearchConfig), maxResults: Math.min((config as SearchConfig).maxResults || 50, 50), } const queries = generateSearchQueries(analysis as EnhancedProductAnalysis, enforcedConfig) - console.log(` โœ“ Generated ${queries.length} queries`) + await logServer({ + level: "info", + message: "Generated search queries", + labels: ["api", "opportunities", "queries"], + payload: { projectId, count: queries.length }, + requestId, + source: "api/opportunities", + }); if (jobId) { await fetchMutation( api.searchJobs.update, @@ -106,9 +146,23 @@ export async function POST(request: NextRequest) { } // Execute searches - console.log(' Executing 
searches...') - const searchResults = await executeSearches(queries) - console.log(` โœ“ Found ${searchResults.length} raw results`) + await logServer({ + level: "info", + message: "Executing searches", + labels: ["api", "opportunities", "search"], + payload: { projectId, queryCount: queries.length }, + requestId, + source: "api/opportunities", + }); + const searchResults = await executeSearches(queries, ageFilters) + await logServer({ + level: "info", + message: "Searches complete", + labels: ["api", "opportunities", "search"], + payload: { projectId, rawResults: searchResults.length }, + requestId, + source: "api/opportunities", + }); if (jobId) { await fetchMutation( api.searchJobs.update, @@ -130,9 +184,23 @@ export async function POST(request: NextRequest) { const filteredResults = searchResults.filter((result) => !existingSet.has(result.url)) // Score and rank - console.log(' Scoring opportunities...') + await logServer({ + level: "info", + message: "Scoring opportunities", + labels: ["api", "opportunities", "score"], + payload: { projectId, candidateResults: filteredResults.length }, + requestId, + source: "api/opportunities", + }); const opportunities = scoreOpportunities(filteredResults, analysis as EnhancedProductAnalysis) - console.log(` โœ“ Scored ${opportunities.length} opportunities`) + await logServer({ + level: "info", + message: "Opportunities scored", + labels: ["api", "opportunities", "score"], + payload: { projectId, scored: opportunities.length }, + requestId, + source: "api/opportunities", + }); if (jobId) { await fetchMutation( api.searchJobs.update, @@ -179,7 +247,14 @@ export async function POST(request: NextRequest) { } catch (error: any) { const errorMessage = error instanceof Error ? error.message : typeof error === "string" ? 
error : "Search failed" - console.error("โŒ Opportunity search error:", errorMessage) + await logServer({ + level: "error", + message: "Opportunity search error", + labels: ["api", "opportunities", "error"], + payload: { message: errorMessage }, + requestId: request.headers.get("x-request-id") ?? undefined, + source: "api/opportunities", + }); if (jobId) { try { diff --git a/app/api/search/route.ts b/app/api/search/route.ts index 7f44090..4aaff4c 100644 --- a/app/api/search/route.ts +++ b/app/api/search/route.ts @@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server' import { isAuthenticatedNextjs } from "@convex-dev/auth/nextjs/server"; import { z } from 'zod' import type { EnhancedProductAnalysis, Opportunity, DorkQuery } from '@/lib/types' +import { logServer } from "@/lib/server-logger"; +import { appendSerperAgeModifiers, SerperAgeFilter } from "@/lib/serper-date-filters"; // Search result from any source interface SearchResult { @@ -31,11 +33,14 @@ const bodySchema = z.object({ problem: z.string(), searchTerms: z.array(z.string()) })) - }) + }), + minAgeDays: z.number().min(0).max(365).optional(), + maxAgeDays: z.number().min(0).max(365).optional(), }) export async function POST(request: NextRequest) { try { + const requestId = request.headers.get("x-request-id") ?? 
undefined; if (!(await isAuthenticatedNextjs())) { const redirectUrl = new URL("/auth", request.url); const referer = request.headers.get("referer"); @@ -45,16 +50,34 @@ export async function POST(request: NextRequest) { } const body = await request.json() - const { analysis } = bodySchema.parse(body) + const { analysis, minAgeDays, maxAgeDays } = bodySchema.parse(body) + const ageFilters: SerperAgeFilter = { + minAgeDays, + maxAgeDays, + } if (!process.env.SERPER_API_KEY) { + await logServer({ + level: "warn", + message: "Serper API key missing", + labels: ["api", "search", "config", "warn"], + requestId, + source: "api/search", + }); return NextResponse.json( { error: 'SERPER_API_KEY is not configured. Add it to your environment to run searches.' }, { status: 400 } ) } - console.log(`๐Ÿ” Finding opportunities for: ${analysis.productName}`) + await logServer({ + level: "info", + message: "Finding opportunities", + labels: ["api", "search", "start"], + payload: { productName: analysis.productName, filters: ageFilters }, + requestId, + source: "api/search", + }); // Sort queries by priority const sortedQueries = analysis.dorkQueries @@ -69,22 +92,50 @@ export async function POST(request: NextRequest) { // Execute searches for (const query of sortedQueries) { try { - console.log(` Searching: ${query.query.substring(0, 60)}...`) - const results = await searchGoogle(query.query, 5) + await logServer({ + level: "info", + message: "Searching query", + labels: ["api", "search", "query"], + payload: { query: query.query, platform: query.platform }, + requestId, + source: "api/search", + }); + const results = await searchGoogle(query.query, 5, ageFilters, requestId) allResults.push(...results) // Small delay to avoid rate limiting await new Promise(r => setTimeout(r, 500)) } catch (e) { - console.error(` Search failed for query: ${query.query.substring(0, 40)}`) + await logServer({ + level: "error", + message: "Search failed for query", + labels: ["api", "search", "query", 
"error"], + payload: { query: query.query, error: String(e) }, + requestId, + source: "api/search", + }); } } - console.log(` Found ${allResults.length} raw results`) + await logServer({ + level: "info", + message: "Search complete", + labels: ["api", "search", "results"], + payload: { rawResults: allResults.length }, + requestId, + source: "api/search", + }); // Analyze and score opportunities const opportunities = await analyzeOpportunities(allResults, analysis as EnhancedProductAnalysis) - console.log(` โœ“ Analyzed ${opportunities.length} opportunities`) + await logServer({ + level: "info", + message: "Opportunities analyzed", + labels: ["api", "search", "analyze"], + payload: { analyzed: opportunities.length }, + requestId, + source: "api/search", + }); return NextResponse.json({ success: true, @@ -100,7 +151,18 @@ export async function POST(request: NextRequest) { }) } catch (error: any) { - console.error('โŒ Search error:', error) + await logServer({ + level: "error", + message: "Search error", + labels: ["api", "search", "error"], + payload: { + message: error?.message, + stack: error?.stack, + filters: ageFilters, + }, + requestId: request.headers.get("x-request-id") ?? 
undefined, + source: "api/search", + }); return NextResponse.json( { error: error.message || 'Failed to find opportunities' }, @@ -109,23 +171,42 @@ export async function POST(request: NextRequest) { } } -async function searchGoogle(query: string, num: number): Promise { - return searchSerper(query, num) +async function searchGoogle( + query: string, + num: number, + filters?: SerperAgeFilter, + requestId?: string +): Promise { + return searchSerper(query, num, filters, requestId) } -async function searchSerper(query: string, num: number): Promise { +async function searchSerper( + query: string, + num: number, + filters?: SerperAgeFilter, + requestId?: string +): Promise { + const filteredQuery = appendSerperAgeModifiers(query, filters) const response = await fetch('https://google.serper.dev/search', { method: 'POST', headers: { 'X-API-KEY': process.env.SERPER_API_KEY!, 'Content-Type': 'application/json' }, - body: JSON.stringify({ q: query, num }) + body: JSON.stringify({ q: filteredQuery, num }) }) if (!response.ok) throw new Error('Serper API error') const data = await response.json() + await logServer({ + level: "info", + message: "Serper response received", + labels: ["api", "search", "serper", "response"], + payload: { query: filteredQuery, num, filters, data }, + requestId, + source: "api/search", + }); return (data.organic || []).map((r: any) => ({ title: r.title, url: r.link, diff --git a/app/app/(app)/search/page.tsx b/app/app/(app)/search/page.tsx index 14cb8bd..3dcbf4b 100644 --- a/app/app/(app)/search/page.tsx +++ b/app/app/(app)/search/page.tsx @@ -209,6 +209,8 @@ export default function OpportunitiesPage() { 'competitor-alternative' ]) const [maxQueries, setMaxQueries] = useState(50) + const [minAgeDays, setMinAgeDays] = useState(0) + const [maxAgeDays, setMaxAgeDays] = useState(30) const [goalPreset, setGoalPreset] = useState('high-intent') const [isSearching, setIsSearching] = useState(false) const [opportunities, setOpportunities] = useState([]) 
@@ -351,6 +353,14 @@ export default function OpportunitiesPage() { })) ) } + if (typeof parsed.minAgeDays === 'number' || typeof parsed.maxAgeDays === 'number') { + const rawMin = typeof parsed.minAgeDays === 'number' ? Math.max(parsed.minAgeDays, 0) : 0 + const rawMax = typeof parsed.maxAgeDays === 'number' ? Math.max(parsed.maxAgeDays, 0) : 30 + const normalizedMin = Math.min(rawMin, rawMax) + const normalizedMax = Math.max(rawMax, normalizedMin) + setMinAgeDays(normalizedMin) + setMaxAgeDays(normalizedMax) + } } catch { // Ignore invalid cached config. } @@ -380,9 +390,11 @@ export default function OpportunitiesPage() { strategies, maxQueries, platforms, + minAgeDays, + maxAgeDays, } localStorage.setItem(key, JSON.stringify(payload)) - }, [selectedProjectId, goalPreset, strategies, maxQueries, platforms]) + }, [selectedProjectId, goalPreset, strategies, maxQueries, platforms, minAgeDays, maxAgeDays]) useEffect(() => { if (!analysis && latestAnalysis === null) { @@ -483,7 +495,9 @@ export default function OpportunitiesPage() { searchTemplate: platform.searchTemplate ?? "", })), strategies, - maxResults: Math.min(maxQueries, 50) + maxResults: Math.min(maxQueries, 50), + minAgeDays: minAgeDays > 0 ? minAgeDays : undefined, + maxAgeDays: maxAgeDays > 0 ? maxAgeDays : undefined, } setLastSearchConfig(config as SearchConfig) @@ -769,6 +783,9 @@ export default function OpportunitiesPage() { ยท max {maxQueries} queries +
+ Lead age window: {minAgeDays === 0 ? 'newest' : `${minAgeDays}+ days old`} โ€“ {maxAgeDays > 0 ? `up to ${maxAgeDays} days` : 'any age'} +
{platforms.filter(p => p.enabled).length === 0 && (

Select at least one source to search.

)} @@ -835,6 +852,42 @@ export default function OpportunitiesPage() { ))} +
+ +

+ Restrict opportunities by lead age. Set a maximum age to avoid archived threads and an optional minimum age to skip brand-new posts. +

+
+
+ Min age (older than) + {minAgeDays} day{minAgeDays === 1 ? '' : 's'} +
+ { + const limited = maxAgeDays > 0 ? Math.min(value, maxAgeDays) : value + setMinAgeDays(limited) + }} + min={0} + max={365} + step={1} + /> +
+ Max age (newer than) + {maxAgeDays > 0 ? `${maxAgeDays} days` : 'Any'} +
+ { + const nextMax = Math.max(value, minAgeDays) + setMaxAgeDays(nextMax) + }} + min={0} + max={365} + step={1} + /> +
+
diff --git a/convex/_generated/api.d.ts b/convex/_generated/api.d.ts index b5dce3b..abc21c3 100644 --- a/convex/_generated/api.d.ts +++ b/convex/_generated/api.d.ts @@ -14,6 +14,7 @@ import type * as analysisSections from "../analysisSections.js"; import type * as auth from "../auth.js"; import type * as dataSources from "../dataSources.js"; import type * as http from "../http.js"; +import type * as logs from "../logs.js"; import type * as opportunities from "../opportunities.js"; import type * as projects from "../projects.js"; import type * as searchJobs from "../searchJobs.js"; @@ -33,6 +34,7 @@ declare const fullApi: ApiFromModules<{ auth: typeof auth; dataSources: typeof dataSources; http: typeof http; + logs: typeof logs; opportunities: typeof opportunities; projects: typeof projects; searchJobs: typeof searchJobs; diff --git a/convex/logs.ts b/convex/logs.ts new file mode 100644 index 0000000..0e80ede --- /dev/null +++ b/convex/logs.ts @@ -0,0 +1,32 @@ +import { mutation } from "./_generated/server"; +import { v } from "convex/values"; +import { getAuthUserId } from "@convex-dev/auth/server"; + +export const createLog = mutation({ + args: { + level: v.union( + v.literal("debug"), + v.literal("info"), + v.literal("warn"), + v.literal("error") + ), + message: v.string(), + labels: v.array(v.string()), + payload: v.optional(v.any()), + source: v.optional(v.string()), + requestId: v.optional(v.string()), + projectId: v.optional(v.id("projects")), + }, + handler: async (ctx, args) => { + const userId = await getAuthUserId(ctx); + const base = { + ...args, + createdAt: Date.now(), + }; + if (userId) { + await ctx.db.insert("logs", { ...base, userId }); + return; + } + await ctx.db.insert("logs", base); + }, +}); diff --git a/convex/schema.ts b/convex/schema.ts index 30682ac..8e28d78 100644 --- a/convex/schema.ts +++ b/convex/schema.ts @@ -221,6 +221,24 @@ const schema = defineSchema({ }) .index("by_project_status", ["projectId", "status"]) 
.index("by_project_createdAt", ["projectId", "createdAt"]), + logs: defineTable({ + level: v.union( + v.literal("debug"), + v.literal("info"), + v.literal("warn"), + v.literal("error") + ), + message: v.string(), + labels: v.array(v.string()), + payload: v.optional(v.any()), + source: v.optional(v.string()), + requestId: v.optional(v.string()), + projectId: v.optional(v.id("projects")), + userId: v.optional(v.id("users")), + createdAt: v.number(), + }) + .index("by_createdAt", ["createdAt"]) + .index("by_project_createdAt", ["projectId", "createdAt"]), }); export default schema; diff --git a/lib/analysis-pipeline.ts b/lib/analysis-pipeline.ts index fc51416..180014a 100644 --- a/lib/analysis-pipeline.ts +++ b/lib/analysis-pipeline.ts @@ -10,6 +10,7 @@ import type { Competitor, DorkQuery } from './types' +import { logServer } from "@/lib/server-logger"; const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY @@ -46,7 +47,13 @@ async function aiGenerate(prompt: string, systemPrompt: string, temperature: try { return JSON.parse(jsonStr) as T } catch (e) { - console.error('Failed to parse JSON:', jsonStr.substring(0, 200)) + await logServer({ + level: "error", + message: "Failed to parse JSON from AI response", + labels: ["analysis-pipeline", "ai", "error"], + payload: { sample: jsonStr.substring(0, 200) }, + source: "lib/analysis-pipeline", + }); throw new Error('Invalid JSON response from AI') } } @@ -490,60 +497,149 @@ export async function performDeepAnalysis( content: ScrapedContent, onProgress?: (update: AnalysisProgressUpdate) => void | Promise ): Promise { - console.log('๐Ÿ” Starting deep analysis...') + await logServer({ + level: "info", + message: "Starting deep analysis", + labels: ["analysis-pipeline", "start"], + source: "lib/analysis-pipeline", + }); - console.log(' ๐Ÿงญ Product profiling...') + await logServer({ + level: "info", + message: "Product profiling", + labels: ["analysis-pipeline", "profile"], + source: "lib/analysis-pipeline", + }); 
const productProfile = await extractProductProfile(content) - console.log(` โœ“ Profiled as ${productProfile.category} for ${productProfile.targetPersona} (conf ${productProfile.confidence})`) + await logServer({ + level: "info", + message: "Product profile complete", + labels: ["analysis-pipeline", "profile"], + payload: { + category: productProfile.category, + targetPersona: productProfile.targetPersona, + confidence: productProfile.confidence, + }, + source: "lib/analysis-pipeline", + }); - console.log(' ๐Ÿ“ฆ Pass 1: Features...') + await logServer({ + level: "info", + message: "Pass 1: Features", + labels: ["analysis-pipeline", "features"], + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "features", status: "running" }) const features = await extractFeatures(content) - console.log(` โœ“ ${features.length} features`) + await logServer({ + level: "info", + message: "Features extracted", + labels: ["analysis-pipeline", "features"], + payload: { count: features.length }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "features", status: "completed", detail: `${features.length} features` }) - console.log(' ๐Ÿ† Pass 2: Competitors...') + await logServer({ + level: "info", + message: "Pass 2: Competitors", + labels: ["analysis-pipeline", "competitors"], + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "competitors", status: "running" }) const candidateSet = await generateCompetitorCandidates(productProfile) const competitors = await selectDirectCompetitors(productProfile, candidateSet.candidates) - console.log(` โœ“ ${competitors.length} competitors: ${competitors.map(c => c.name).join(', ')}`) + await logServer({ + level: "info", + message: "Competitors extracted", + labels: ["analysis-pipeline", "competitors"], + payload: { count: competitors.length, names: competitors.map((c) => c.name) }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "competitors", status: "completed", detail: 
`${competitors.length} competitors: ${competitors.map(c => c.name).join(', ')}` }) - console.log(' ๐Ÿ”‘ Pass 3: Keywords...') + await logServer({ + level: "info", + message: "Pass 3: Keywords", + labels: ["analysis-pipeline", "keywords"], + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "keywords", status: "running" }) const keywords = await generateKeywords(features, content, competitors) - console.log(` โœ“ ${keywords.length} keywords (${keywords.filter(k => k.type === 'differentiator').length} differentiators)`) + await logServer({ + level: "info", + message: "Keywords extracted", + labels: ["analysis-pipeline", "keywords"], + payload: { + count: keywords.length, + differentiators: keywords.filter((k) => k.type === "differentiator").length, + }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "keywords", status: "completed", detail: `${keywords.length} keywords (${keywords.filter(k => k.type === 'differentiator').length} differentiators)` }) - console.log(' ๐ŸŽฏ Pass 4: Problems...') + await logServer({ + level: "info", + message: "Pass 4: Problems", + labels: ["analysis-pipeline", "problems"], + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "problems", status: "running" }) const problems = await identifyProblems(features, content) const personas = await generatePersonas(content, problems) - console.log(` โœ“ ${problems.length} problems, ${personas.length} personas`) + await logServer({ + level: "info", + message: "Problems and personas extracted", + labels: ["analysis-pipeline", "problems"], + payload: { problems: problems.length, personas: personas.length }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "problems", status: "completed", detail: `${problems.length} problems, ${personas.length} personas` }) - console.log(' ๐Ÿ’ก Pass 5: Use cases...') + await logServer({ + level: "info", + message: "Pass 5: Use cases", + labels: ["analysis-pipeline", "use-cases"], + source: 
"lib/analysis-pipeline", + }); await onProgress?.({ key: "useCases", status: "running" }) const useCases = await generateUseCases(features, personas, problems) - console.log(` โœ“ ${useCases.length} use cases`) + await logServer({ + level: "info", + message: "Use cases extracted", + labels: ["analysis-pipeline", "use-cases"], + payload: { count: useCases.length }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "useCases", status: "completed", detail: `${useCases.length} use cases` }) - console.log(' ๐Ÿ”Ž Pass 6: Dork queries...') + await logServer({ + level: "info", + message: "Pass 6: Dork queries", + labels: ["analysis-pipeline", "dork-queries"], + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "dorkQueries", status: "running" }) const dorkQueries = generateDorkQueries(keywords, problems, useCases, competitors) - console.log(` โœ“ ${dorkQueries.length} queries`) + await logServer({ + level: "info", + message: "Dork queries extracted", + labels: ["analysis-pipeline", "dork-queries"], + payload: { count: dorkQueries.length }, + source: "lib/analysis-pipeline", + }); await onProgress?.({ key: "dorkQueries", status: "completed", detail: `${dorkQueries.length} queries` }) const productName = content.title.split(/[\|\-โ€“โ€”:]/)[0].trim() diff --git a/lib/openai.ts b/lib/openai.ts index fb453c0..931b9a5 100644 --- a/lib/openai.ts +++ b/lib/openai.ts @@ -1,5 +1,6 @@ import OpenAI from 'openai' import type { ProductAnalysis, ScrapedContent, Opportunity } from './types' +import { logServer } from "@/lib/server-logger"; const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY @@ -111,7 +112,13 @@ export async function findOpportunities(analysis: ProductAnalysis): Promise const results = await searchSerper(query, num) if (results.length > 0) return results } catch (e) { - console.error('Serper search failed:', e) + await logServer({ + level: "error", + message: "Serper search failed", + labels: ["openai", "serper", "error"], + 
payload: { error: String(e) }, + source: "lib/openai", + }); } } @@ -234,6 +247,13 @@ async function searchSerper(query: string, num: number): Promise if (!response.ok) throw new Error('Serper API error') const data = await response.json() + await logServer({ + level: "info", + message: "Serper response received", + labels: ["openai", "serper", "response"], + payload: { query, num, data }, + source: "lib/openai", + }); return (data.organic || []).map((r: any) => ({ title: r.title, url: r.link, diff --git a/lib/query-generator.ts b/lib/query-generator.ts index 4bcfd62..259287a 100644 --- a/lib/query-generator.ts +++ b/lib/query-generator.ts @@ -5,6 +5,7 @@ import type { SearchStrategy, PlatformId } from './types' +import { logServer } from "@/lib/server-logger"; export function getDefaultPlatforms(): Record { return { @@ -77,9 +78,13 @@ export function generateSearchQueries( const deduped = sortAndDedupeQueries(queries) const limited = deduped.slice(0, config.maxResults || 50) - console.info( - `[opportunities] queries: generated=${queries.length} deduped=${deduped.length} limited=${limited.length}` - ) + void logServer({ + level: "info", + message: "Search queries generated", + labels: ["query-generator", "queries"], + payload: { generated: queries.length, deduped: deduped.length, limited: limited.length }, + source: "lib/query-generator", + }); return limited } diff --git a/lib/scraper.ts b/lib/scraper.ts index 3a47ef9..c6c2c8b 100644 --- a/lib/scraper.ts +++ b/lib/scraper.ts @@ -1,5 +1,6 @@ import puppeteer from 'puppeteer' import type { ScrapedContent } from './types' +import { logServer } from "@/lib/server-logger"; export class ScrapingError extends Error { constructor(message: string, public code: string) { @@ -94,7 +95,13 @@ export async function scrapeWebsite(url: string): Promise { } } catch (error: any) { - console.error('Scraping error:', error) + await logServer({ + level: "error", + message: "Scraping error", + labels: ["scraper", "error"], + payload: 
{ url: validatedUrl, error: String(error) }, + source: "lib/scraper", + }); if (error.message?.includes('ERR_NAME_NOT_RESOLVED') || error.message?.includes('net::ERR')) { throw new ScrapingError( diff --git a/lib/search-executor.ts b/lib/search-executor.ts index c0da2ee..32bb2e7 100644 --- a/lib/search-executor.ts +++ b/lib/search-executor.ts @@ -1,4 +1,6 @@ import type { GeneratedQuery, Opportunity, EnhancedProductAnalysis } from './types' +import { logServer } from "@/lib/server-logger"; +import { appendSerperAgeModifiers, SerperAgeFilter } from "@/lib/serper-date-filters"; interface SearchResult { title: string @@ -10,6 +12,7 @@ interface SearchResult { export async function executeSearches( queries: GeneratedQuery[], + filters?: SerperAgeFilter, onProgress?: (progress: { current: number; total: number; platform: string }) => void ): Promise { const results: SearchResult[] = [] @@ -24,11 +27,17 @@ export async function executeSearches( let completed = 0 for (const [platform, platformQueries] of byPlatform) { - console.log(`Searching ${platform}: ${platformQueries.length} queries`) + await logServer({ + level: "info", + message: "Searching platform", + labels: ["search-executor", "platform", "start"], + payload: { platform, queries: platformQueries.length }, + source: "lib/search-executor", + }); for (const query of platformQueries) { try { - const searchResults = await executeSingleSearch(query) + const searchResults = await executeSingleSearch(query, filters) results.push(...searchResults) completed++ @@ -37,7 +46,13 @@ export async function executeSearches( // Rate limiting - 1 second between requests await delay(1000) } catch (err) { - console.error(`Search failed for ${platform}:`, err) + await logServer({ + level: "error", + message: "Search failed for platform", + labels: ["search-executor", "platform", "error"], + payload: { platform, error: String(err) }, + source: "lib/search-executor", + }); } } } @@ -45,15 +60,15 @@ export async function 
executeSearches( return results } -async function executeSingleSearch(query: GeneratedQuery): Promise { +async function executeSingleSearch(query: GeneratedQuery, filters?: SerperAgeFilter): Promise { if (!process.env.SERPER_API_KEY) { throw new Error('SERPER_API_KEY is not configured.') } - return searchWithSerper(query) + return searchWithSerper(query, filters) } -async function searchWithSerper(query: GeneratedQuery): Promise { +async function searchWithSerper(query: GeneratedQuery, filters?: SerperAgeFilter): Promise { const response = await fetch('https://google.serper.dev/search', { method: 'POST', headers: { @@ -61,7 +76,7 @@ async function searchWithSerper(query: GeneratedQuery): Promise 'Content-Type': 'application/json' }, body: JSON.stringify({ - q: query.query, + q: appendSerperAgeModifiers(query.query, filters), num: 5, gl: 'us', hl: 'en' @@ -73,6 +88,13 @@ async function searchWithSerper(query: GeneratedQuery): Promise } const data = await response.json() + await logServer({ + level: "info", + message: "Serper response received", + labels: ["search-executor", "serper", "response"], + payload: { query: query.query, platform: query.platform, data }, + source: "lib/search-executor", + }); return (data.organic || []).map((r: any) => ({ title: r.title, diff --git a/lib/serper-date-filters.ts b/lib/serper-date-filters.ts new file mode 100644 index 0000000..27eff85 --- /dev/null +++ b/lib/serper-date-filters.ts @@ -0,0 +1,28 @@ +export type SerperAgeFilter = { + maxAgeDays?: number + minAgeDays?: number +} + +const normalizeDays = (value?: number) => { + if (typeof value !== 'number') return undefined + if (!Number.isFinite(value)) return undefined + if (value <= 0) return undefined + return Math.floor(value) +} + +export function appendSerperAgeModifiers(query: string, filters?: SerperAgeFilter): string { + if (!filters) return query + const modifiers: string[] = [] + const normalizedMax = normalizeDays(filters.maxAgeDays) + const normalizedMin = 
normalizeDays(filters.minAgeDays) + + if (typeof normalizedMax === 'number') { + modifiers.push(`after:${new Date(Date.now() - normalizedMax * 86400000).toISOString().slice(0, 10)}`) + } + if (typeof normalizedMin === 'number') { + modifiers.push(`before:${new Date(Date.now() - normalizedMin * 86400000).toISOString().slice(0, 10)}`) + } + + if (modifiers.length === 0) return query + return `${query} ${modifiers.join(' ')}` +} diff --git a/lib/server-logger.ts b/lib/server-logger.ts new file mode 100644 index 0000000..d439705 --- /dev/null +++ b/lib/server-logger.ts @@ -0,0 +1,57 @@ +import { fetchMutation } from "convex/nextjs"; +import { api } from "@/convex/_generated/api"; +import type { Id } from "@/convex/_generated/dataModel"; + +type LogLevel = "debug" | "info" | "warn" | "error"; + +type LogParams = { + level: LogLevel; + message: string; + labels: string[]; + payload?: unknown; + source?: string; + requestId?: string; + projectId?: Id<"projects">; + token?: string; +}; + +function writeConsole(level: LogLevel, message: string, payload?: unknown) { + const tag = `[${level}]`; + if (payload === undefined) { + if (level === "error") { + console.error(tag, message); + return; + } + if (level === "warn") { + console.warn(tag, message); + return; + } + console.log(tag, message); + return; + } + if (level === "error") { + console.error(tag, message, payload); + return; + } + if (level === "warn") { + console.warn(tag, message, payload); + return; + } + console.log(tag, message, payload); +} + +export async function logServer({ + token, + ...args +}: LogParams): Promise { + writeConsole(args.level, args.message, args.payload); + try { + if (token) { + await fetchMutation(api.logs.createLog, args, { token }); + return; + } + await fetchMutation(api.logs.createLog, args); + } catch (error) { + console.error("[logger] Failed to write log", error); + } +} diff --git a/lib/types.ts b/lib/types.ts index 6f1f5df..b64dd5e 100644 --- a/lib/types.ts +++ b/lib/types.ts @@ -26,6 +26,8 @@ export interface SearchConfig { platforms: PlatformConfig[] strategies: SearchStrategy[]
maxResults: number + minAgeDays?: number + maxAgeDays?: number timeFilter?: 'past-day' | 'past-week' | 'past-month' | 'past-year' | 'all' } diff --git a/next.config.js b/next.config.js index 461b31c..a578ad3 100644 --- a/next.config.js +++ b/next.config.js @@ -1,5 +1,6 @@ /** @type {import('next').NextConfig} */ const nextConfig = { + output: 'standalone', experimental: { serverComponentsExternalPackages: ['puppeteer'] }