diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..506118a
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,5 @@
+# .env.local
+OPENAI_API_KEY="sk-..."
+GROQ_API_KEY="gsk_..."
+GOOGLE_API_KEY="AIza..."
+FIRECRAWL_API_KEY="..."
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..01ea385
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,24 @@
+# Use a Node.js 20 base image
+FROM node:20-alpine AS base
+
+# Set the working directory
+WORKDIR /app
+
+# Install dependencies needed for the build
+RUN apk add --no-cache libc6-compat
+
+# Copy the package manifest files and install dependencies
+COPY package*.json ./
+RUN npm install
+
+# Copy the rest of the application files
+COPY . .
+
+# Build the application
+RUN npm run build
+
+# Expose the port the application runs on
+EXPOSE 3000
+
+# Define the command to start the application
+CMD ["npm", "start"]
diff --git a/README.md b/README.md
index 9c8f724..145108f 100644
--- a/README.md
+++ b/README.md
@@ -43,13 +43,61 @@ npm run dev
 
 Visit http://localhost:3000
 
+---
+
+## Docker
+
+You can also run Fireplexity in a Docker container:
+
+```bash
+docker build -t fireplexity .
+docker run --env-file .env.local -p 3000:3000 fireplexity
+```
+
+Make sure to set your API keys in `.env.local` before building the image.
+
+---
+
+## LLM Providers
+
+Fireplexity supports multiple LLM providers, including OpenAI, Groq, and Google. The search API routes read an optional `provider` and `model` from the request body and default to Groq's `llama3-8b-8192`. Add the API key for each provider you plan to use to `.env.local`:
+
+```
+OPENAI_API_KEY="sk-..."
+GROQ_API_KEY="gsk_..."
+GOOGLE_API_KEY="AIza..."
+```
+
+You only need the key for the provider you select. See each provider's documentation for the available model names.
+
+---
+
+## Example Queries & Scraping Tips
+
+Fireplexity can scrape and answer questions about a wide range of topics using real-time web data. Here are some example queries you can try:
+
+- `Summarize the latest news about OpenAI.`
+- `What are the top 3 competitors of Firecrawl?`
+- `Show me the current stock price and chart for Tesla.`
+- `Find recent reviews for the iPhone 15.`
+- `Get the weather forecast for Madrid this week.`
+- `What are the main findings from the latest Nature article on AI safety?`
+- `Compare the pricing of AWS and Google Cloud in 2024.`
+
+**Tips for Optimal Results:**
+- Be specific: Include names, dates, or sources if possible (e.g., "Summarize the latest Wired article on quantum computing").
+- Ask for summaries or comparisons to get concise, actionable answers.
+- For financial data, mention the company name or ticker (e.g., "AAPL stock chart").
+- For academic or technical topics, reference the publication or author for more accurate scraping.
+- If you want sources, add "with sources" or "with citations" to your query.
+
 ## Tech Stack
 
 - **Firecrawl** - Web scraping API
 - **Next.js 15** - React framework
-- **OpenAI** - GPT-4o-mini
+- **OpenAI** - GPT-4o-mini (configurable)
 - **Vercel AI SDK** - Streaming
 - **TradingView** - Stock charts
+- **Docker** - Containerized deployment
+- **Multiple LLM Providers** - OpenAI, Groq, Google
 
 ## Deploy
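Since the updated routes below select the model from the JSON request body rather than from an environment variable, here is a minimal sketch of a direct call to the search endpoint. The endpoint path and field names are taken from the route changes in this diff; the example query and stream handling are illustrative only:

```ts
// Sketch only: POST to the Fireplexity search route with an explicit provider/model.
// If `provider` and `model` are omitted, the route falls back to 'groq' / 'llama3-8b-8192'.
const res = await fetch('/api/fireplexity/search', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    messages: [{ role: 'user', content: 'What are the top 3 competitors of Firecrawl?' }],
    provider: 'groq',
    model: 'llama3-8b-8192',
  }),
})
// The route answers via createDataStreamResponse, so read the body as a stream.
const reader = res.body?.getReader()
```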
diff --git a/app/api/fire-cache/search/route.ts b/app/api/fire-cache/search/route.ts
index 3f6fd24..4900565 100644
--- a/app/api/fire-cache/search/route.ts
+++ b/app/api/fire-cache/search/route.ts
@@ -1,5 +1,5 @@
 import { NextResponse } from 'next/server'
-import { createOpenAI } from '@ai-sdk/openai'
+import { getLLMProvider, Provider } from '@/lib/llm-provider'
 import { streamText, generateText, createDataStreamResponse } from 'ai'
 import { detectCompanyTicker } from '@/lib/company-ticker-map'
 
@@ -8,7 +8,7 @@ export async function POST(request: Request) {
   console.log(`[${requestId}] Fire Cache Search API called`)
   try {
     const body = await request.json()
-    const messages = body.messages || []
+    const { messages = [], provider = 'groq', model = 'llama3-8b-8192' } = body
     const query = messages[messages.length - 1]?.content || body.query
 
     console.log(`[${requestId}] Query received:`, query)
@@ -17,20 +17,14 @@
     const firecrawlApiKey = process.env.FIRECRAWL_API_KEY
-    const openaiApiKey = process.env.OPENAI_API_KEY
-
+    const llmProvider = getLLMProvider(provider as Provider)
+    const llmModel = llmProvider(model)
+    console.log(`[${requestId}] Using provider: ${provider}, model: ${model}`)
+    console.log(`[${requestId}] LLM Model Object:`, llmModel)
+
     if (!firecrawlApiKey) {
       return NextResponse.json({ error: 'Firecrawl API key not configured' }, { status: 500 })
     }
-
-    if (!openaiApiKey) {
-      return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
    - }
-
-    // Configure OpenAI with API key
-    const openai = createOpenAI({
-      apiKey: openaiApiKey
-    })
 
     // Always perform a fresh search for each query to ensure relevant results
     const isFollowUp = messages.length > 2
@@ -200,7 +194,7 @@
       : `user: ${query}`
 
     const followUpPromise = generateText({
-      model: openai('gpt-4o'),
+      model: llmModel,
      messages: [
        {
          role: 'system',
@@ -213,15 +207,15 @@
          content: `Query: ${query}\n\nConversation context:\n${conversationPreview}\n\n${sources.length > 0 ? `Available sources about: ${sources.map((s: { title: string }) => s.title).join(', ')}\n\n` : ''}Generate 5 diverse follow-up questions that would help the user learn more about this topic from different angles.`
        }
      ],
-      temperature: 0.7,
+      temperature: 0.5,
      maxTokens: 150,
    })

    // Stream the text generation
    const result = streamText({
-      model: openai('gpt-4o'),
+      model: llmModel,
      messages: aiMessages,
-      temperature: 0.7,
+      temperature: 0.5,
      maxTokens: 2000
    })
diff --git a/app/api/fireplexity/search/route.ts b/app/api/fireplexity/search/route.ts
index 43e511c..f1d5e3c 100644
--- a/app/api/fireplexity/search/route.ts
+++ b/app/api/fireplexity/search/route.ts
@@ -1,5 +1,5 @@
 import { NextResponse } from 'next/server'
-import { createOpenAI } from '@ai-sdk/openai'
+import { getLLMProvider, Provider } from '@/lib/llm-provider'
 import { streamText, generateText, createDataStreamResponse } from 'ai'
 import { detectCompanyTicker } from '@/lib/company-ticker-map'
 import { selectRelevantContent } from '@/lib/content-selection'
@@ -10,8 +10,11 @@ export async function POST(request: Request) {
   console.log(`[${requestId}] Fireplexity Search API called`)
   try {
     const body = await request.json()
-    const messages = body.messages || []
+    const { messages = [], provider = 'groq', model = 'llama3-8b-8192' } = body
+    // pick the requested LLM
+    const llmModel = getLLMProvider(provider as Provider)(model)
     const query = messages[messages.length - 1]?.content || body.query
+
     console.log(`[${requestId}] Query received:`, query)
 
     if (!query) {
@@ -30,11 +33,6 @@
       return NextResponse.json({ error: 'OpenAI API key not configured' }, { status: 500 })
     }
 
-    // Configure OpenAI with API key
-    const openai = createOpenAI({
-      apiKey: openaiApiKey
-    })
-
     // Initialize Firecrawl
     const firecrawl = new FirecrawlApp({ apiKey: firecrawlApiKey })
 
@@ -170,13 +168,13 @@
       ]
     }
 
-    // Start generating follow-up questions in parallel (before streaming answer)
+    // Start generating follow-up questions in parallel
     const conversationPreview = isFollowUp
       ? messages.map((m: { role: string; content: string }) => `${m.role}: ${m.content}`).join('\n\n')
       : `user: ${query}`
 
     const followUpPromise = generateText({
-      model: openai('gpt-4o-mini'),
+      model: llmModel,
      messages: [
        {
          role: 'system',
@@ -195,7 +193,7 @@
 
     // Stream the text generation
     const result = streamText({
-      model: openai('gpt-4o-mini'),
+      model: llmModel,
       messages: aiMessages,
       temperature: 0.7,
       maxTokens: 2000
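The route changes above read `provider` and `model` from the request body, but the chat-interface diff below does not add those fields to its requests, so the Groq defaults always apply. A minimal sketch of how a client could forward them with the Vercel AI SDK's `useChat` hook follows; the component, hook options, and import path are assumptions based on the route code, not part of this diff:

```tsx
'use client'
// Hypothetical client-side wiring; depending on the installed AI SDK version,
// useChat is exported from 'ai/react' or '@ai-sdk/react'.
import { useChat } from 'ai/react'

export function ProviderAwareChat() {
  const { messages, input, handleInputChange, handleSubmit } = useChat({
    api: '/api/fireplexity/search',
    // Extra fields in `body` are merged into the JSON payload of every request,
    // matching the `provider` / `model` destructuring in route.ts.
    body: { provider: 'groq', model: 'llama3-8b-8192' },
  })

  return (
    <form onSubmit={handleSubmit}>
      <input value={input} onChange={handleInputChange} />
      {messages.map(m => (
        <p key={m.id}>{m.role}: {m.content}</p>
      ))}
    </form>
  )
}
```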
diff --git a/app/chat-interface.tsx b/app/chat-interface.tsx
index 6422da5..55c4fa4 100644
--- a/app/chat-interface.tsx
+++ b/app/chat-interface.tsx
@@ -199,7 +199,7 @@ export function ChatInterface({ messages, sources, followUpQuestions, searchStat
            )}
-            ))}
+          ))}
          )}
@@ -614,7 +614,7 @@ export function ChatInterface({ messages, sources, followUpQuestions, searchStat
              }
            }}
            placeholder="Ask a follow-up question..."
-            className="resize-none border-0 focus:ring-0 focus:outline-none bg-transparent placeholder:text-gray-400 dark:placeholder:text-gray-500 px-4 py-2 pr-2 shadow-none focus-visible:ring-0 focus-visible:border-0"
+            className="resize-none border-0 focus:ring-0 focus:outline-none bg-transparent placeholder:text-gray-500 dark:placeholder:text-gray-400 text-gray-900 dark:text-gray-100 px-4 py-2 pr-2 shadow-none focus-visible:ring-0 focus-visible:border-0"
            rows={1}
            style={{
              minHeight: '36px',
@@ -626,7 +626,8 @@ export function ChatInterface({ messages, sources, followUpQuestions, searchStat
diff --git a/components/ui/button.tsx b/components/ui/button.tsx
index 8f9c1c1..9e065ff 100644
--- a/components/ui/button.tsx
+++ b/components/ui/button.tsx
@@ -19,7 +19,7 @@
        ghost: "hover:bg-accent hover:text-accent-foreground",
        link: "text-primary underline-offset-4 hover:underline",
        code: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-[#36322F] text-[#fff] hover:bg-[#4a4542] disabled:bg-[#8c8885] disabled:hover:bg-[#8c8885] [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#171310,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(58,_33,_8,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#171310,_0px_1px_3px_0px_rgba(58,_33,_8,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#171310,_0px_1px_2px_0px_rgba(58,_33,_8,_30%)] disabled:shadow-none disabled:hover:translate-y-0 disabled:hover:scale-100",
-        orange: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-orange-500 text-white hover:bg-orange-300 dark:bg-orange-500 dark:hover:bg-orange-300 dark:text-white [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#c2410c,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(234,_88,_12,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#c2410c,_0px_1px_3px_0px_rgba(234,_88,_12,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#c2410c,_0px_1px_2px_0px_rgba(234,_88,_12,_30%)] disabled:shadow-none disabled:hover:translate-y-0 disabled:hover:scale-100",
+        orange: "h-9 px-4 rounded-[10px] text-sm font-medium items-center transition-all duration-200 disabled:cursor-not-allowed disabled:opacity-50 bg-orange-500 text-white hover:bg-orange-600 dark:bg-orange-500 dark:hover:bg-orange-400 dark:text-white disabled:bg-gray-300 disabled:text-gray-400 [box-shadow:inset_0px_-2.108433723449707px_0px_0px_#c2410c,_0px_1.2048193216323853px_6.325301647186279px_0px_rgba(234,_88,_12,_58%)] hover:translate-y-[1px] hover:scale-[0.98] hover:[box-shadow:inset_0px_-1px_0px_0px_#c2410c,_0px_1px_3px_0px_rgba(234,_88,_12,_40%)] active:translate-y-[2px] active:scale-[0.97] active:[box-shadow:inset_0px_1px_1px_0px_#c2410c,_0px_1px_2px_0px_rgba(234,_88,_12,_40%)]",
      },
      size: {
        default: "h-10 px-4 py-2",
diff --git a/components/ui/input.tsx b/components/ui/input.tsx
index 0c2c6ff..aa28ef6 100644
--- a/components/ui/input.tsx
+++ b/components/ui/input.tsx
@@ -8,7 +8,7 @@ function Input({ className, type, ...props }: React.ComponentProps<"input">) {
      type={type}
      data-slot="input"
      className={cn(
-        "file:text-foreground placeholder:text-muted-foreground selection:bg-primary selection:text-primary-foreground dark:bg-input/30 border-input flex h-9 w-full min-w-0 rounded-md border bg-transparent px-3 py-1 text-base shadow-xs transition-[color,box-shadow] outline-none file:inline-flex file:h-7 file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:pointer-events-none disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
+        "file:text-foreground placeholder:text-gray-500 dark:placeholder:text-gray-400 selection:bg-primary selection:text-primary-foreground dark:bg-zinc-800 bg-white text-gray-900 dark:text-gray-100",
        "focus-visible:border-orange-400 focus-visible:ring-orange-400/20 focus-visible:ring-2",
        "aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40 aria-invalid:border-destructive",
        className
diff --git a/components/ui/textarea.tsx b/components/ui/textarea.tsx
index 7f21b5e..b0097f3 100644
--- a/components/ui/textarea.tsx
+++ b/components/ui/textarea.tsx
@@ -7,7 +7,7 @@ function Textarea({ className, ...props }: React.ComponentProps<"textarea">) {
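Both routes import `getLLMProvider` and the `Provider` type from `@/lib/llm-provider`, which is not included in this diff. Below is a minimal sketch of what that module could look like, assuming the three providers implied by `.env.example` (OpenAI, Groq, Google) and the corresponding Vercel AI SDK provider packages; it is not the actual module from this PR:

```ts
// lib/llm-provider.ts — hypothetical sketch, not the module shipped with this PR.
import { createOpenAI } from '@ai-sdk/openai'
import { createGroq } from '@ai-sdk/groq'
import { createGoogleGenerativeAI } from '@ai-sdk/google'

export type Provider = 'openai' | 'groq' | 'google'

// Returns a provider factory; calling it with a model id (e.g. 'llama3-8b-8192')
// yields a LanguageModel that streamText / generateText accept.
export function getLLMProvider(provider: Provider) {
  switch (provider) {
    case 'openai':
      return createOpenAI({ apiKey: process.env.OPENAI_API_KEY })
    case 'groq':
      return createGroq({ apiKey: process.env.GROQ_API_KEY })
    case 'google':
      return createGoogleGenerativeAI({ apiKey: process.env.GOOGLE_API_KEY })
    default:
      throw new Error(`Unsupported provider: ${provider}`)
  }
}
```

A module along these lines would match how the routes call it (`getLLMProvider(provider as Provider)(model)`) and would require adding `@ai-sdk/groq` and `@ai-sdk/google` alongside the `@ai-sdk/openai` dependency already used by the previous code.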