diff --git a/.env.example b/.env.example index 14dc456e..60116eac 100644 --- a/.env.example +++ b/.env.example @@ -15,6 +15,9 @@ GOOGLE_API_KEY="" MISTRAL_MODEL_NAME="mistral-medium-latest" MISTRAL_API_KEY="" +OPENROUTER_MODEL_NAME="openai/gpt-4.1-mini" +OPENROUTER_API_KEY="" + # Auth Config BETTER_AUTH_SECRET="random-secret-key" # please use any long random string here diff --git a/README.md b/README.md index 6adfc937..8ece888f 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ Snap a photo of any receipt or upload an invoice PDF, and TaxHacker will automat - **Auto-categorization**: Transactions are automatically sorted into relevant categories based on their content - **Item splitting**: Extract individual items from invoices and split them into separate transactions when needed - **Structured storage**: Everything gets saved in an organized database for easy filtering and retrieval -- **Customizable AI providers**: Choose from OpenAI, Google Gemini, or Mistral (local LLM support coming soon) +- **Customizable AI providers**: Choose from OpenAI, Google Gemini, Mistral and OpenRouter (local LLM support coming soon) TaxHacker works with a wide variety of documents, including store receipts, restaurant bills, invoices, bank statements, letters, even handwritten receipts. It handles any language and any currency with ease. 
@@ -163,6 +163,7 @@ You can also configure LLM provider settings in the application or via environme - **OpenAI**: `OPENAI_MODEL_NAME` and `OPENAI_API_KEY` - **Google Gemini**: `GOOGLE_MODEL_NAME` and `GOOGLE_API_KEY` - **Mistral**: `MISTRAL_MODEL_NAME` and `MISTRAL_API_KEY` +- **OpenRouter**: `OPENROUTER_MODEL_NAME` and `OPENROUTER_API_KEY` ## ⌨️ Local Development diff --git a/ai/providers/llmProvider.ts b/ai/providers/llmProvider.ts index ec56100a..e5c14082 100644 --- a/ai/providers/llmProvider.ts +++ b/ai/providers/llmProvider.ts @@ -3,7 +3,7 @@ import { ChatGoogleGenerativeAI } from "@langchain/google-genai" import { ChatMistralAI } from "@langchain/mistralai" import { BaseMessage, HumanMessage } from "@langchain/core/messages" -export type LLMProvider = "openai" | "google" | "mistral" +export type LLMProvider = "openai" | "google" | "mistral" | "openrouter" export interface LLMConfig { provider: LLMProvider @@ -50,6 +50,15 @@ async function requestLLMUnified(config: LLMConfig, req: LLMRequest): Promise
diff --git a/app/(auth)/actions.ts b/app/(auth)/actions.ts index 033f3c56..a095deee 100644 --- a/app/(auth)/actions.ts +++ b/app/(auth)/actions.ts @@ -16,7 +16,8 @@ export async function selfHostedGetStartedAction(formData: FormData) { const apiKeys = [ "openai_api_key", "google_api_key", - "mistral_api_key" + "mistral_api_key", + "openrouter_api_key" ] for (const key of apiKeys) { diff --git a/app/(auth)/self-hosted/page.tsx b/app/(auth)/self-hosted/page.tsx index a746efd3..f6f7c6ca 100644 --- a/app/(auth)/self-hosted/page.tsx +++ b/app/(auth)/self-hosted/page.tsx @@ -37,6 +37,7 @@ export default async function SelfHostedWelcomePage() { openai: config.ai.openaiApiKey ?? "", google: config.ai.googleApiKey ?? "", mistral: config.ai.mistralApiKey ?? "", + openrouter: config.ai.openrouterApiKey ?? "", } return ( diff --git a/components/settings/llm-settings-form.tsx b/components/settings/llm-settings-form.tsx index c4ab6b08..ca8d00d2 100644 --- a/components/settings/llm-settings-form.tsx +++ b/components/settings/llm-settings-form.tsx @@ -30,7 +30,7 @@ import { PROVIDERS } from "@/lib/llm-providers"; function getInitialProviderOrder(settings: Record) { let order: string[] = [] if (!settings.llm_providers) { - order = ['openai', 'google', 'mistral'] + order = ['openai', 'google', 'mistral', 'openrouter'] } else { order = settings.llm_providers.split(",").map(p => p.trim()) } diff --git a/forms/settings.ts b/forms/settings.ts index 08ab5997..7ce8f994 100644 --- a/forms/settings.ts +++ b/forms/settings.ts @@ -12,7 +12,9 @@ export const settingsFormSchema = z.object({ google_model_name: z.string().default("gemini-2.5-flash"), mistral_api_key: z.string().optional(), mistral_model_name: z.string().default("mistral-medium-latest"), - llm_providers: z.string().default('openai,google,mistral'), + openrouter_api_key: z.string().optional(), + openrouter_model_name: z.string().default("openai/gpt-4.1-mini"), + llm_providers: z.string().default('openai,google,mistral,openrouter'), 
prompt_analyse_new_file: z.string().optional(), is_welcome_message_hidden: z.string().optional(), }) diff --git a/lib/config.ts b/lib/config.ts index e28579ea..a1eb670b 100644 --- a/lib/config.ts +++ b/lib/config.ts @@ -10,6 +10,8 @@ const envSchema = z.object({ GOOGLE_MODEL_NAME: z.string().default("gemini-2.5-flash"), MISTRAL_API_KEY: z.string().optional(), MISTRAL_MODEL_NAME: z.string().default("mistral-medium-latest"), + OPENROUTER_API_KEY: z.string().optional(), + OPENROUTER_MODEL_NAME: z.string().default("openai/gpt-4.1-mini"), BETTER_AUTH_SECRET: z .string() .min(16, "Auth secret must be at least 16 characters") @@ -59,6 +61,8 @@ const config = { googleModelName: env.GOOGLE_MODEL_NAME, mistralApiKey: env.MISTRAL_API_KEY, mistralModelName: env.MISTRAL_MODEL_NAME, + openrouterApiKey: env.OPENROUTER_API_KEY, + openrouterModelName: env.OPENROUTER_MODEL_NAME, }, auth: { secret: env.BETTER_AUTH_SECRET, diff --git a/lib/llm-providers.ts b/lib/llm-providers.ts index 8ffa670a..63f4cf6c 100644 --- a/lib/llm-providers.ts +++ b/lib/llm-providers.ts @@ -44,4 +44,19 @@ export const PROVIDERS = [ }, logo: "/logo/mistral.svg" }, + { + key: "openrouter", + label: "OpenRouter", + apiKeyName: "openrouter_api_key", + modelName: "openrouter_model_name", + defaultModelName: "openai/gpt-4.1-mini", + apiDoc: "https://openrouter.ai/keys", + apiDocLabel: "OpenRouter Keys", + placeholder: "sk-or-...", + help: { + url: "https://openrouter.ai/keys", + label: "OpenRouter Keys" + }, + logo: "/logo/openrouter.svg" + }, ] diff --git a/models/settings.ts b/models/settings.ts index e4cd71f8..460664bb 100644 --- a/models/settings.ts +++ b/models/settings.ts @@ -9,28 +9,37 @@ export type SettingsMap = Record * Helper to extract LLM provider settings from SettingsMap. 
*/ export function getLLMSettings(settings: SettingsMap) { - const priorities = (settings.llm_providers || "openai,google,mistral").split(",").map(p => p.trim()).filter(Boolean) + const priorities = (settings.llm_providers || "openai,google,mistral,openrouter").split(",").map(p => p.trim()).filter(Boolean) const providers = priorities.map((provider) => { + const providerConfig = PROVIDERS.find(p => p.key === provider) + if (provider === "openai") { return { provider: provider as LLMProvider, apiKey: settings.openai_api_key || "", - model: settings.openai_model_name || PROVIDERS[0]['defaultModelName'], + model: settings.openai_model_name || providerConfig?.defaultModelName || "gpt-4o-mini", } } if (provider === "google") { return { provider: provider as LLMProvider, apiKey: settings.google_api_key || "", - model: settings.google_model_name || PROVIDERS[1]['defaultModelName'], + model: settings.google_model_name || providerConfig?.defaultModelName || "gemini-2.5-flash", } } if (provider === "mistral") { return { provider: provider as LLMProvider, apiKey: settings.mistral_api_key || "", - model: settings.mistral_model_name || PROVIDERS[2]['defaultModelName'], + model: settings.mistral_model_name || providerConfig?.defaultModelName || "mistral-medium-latest", + } + } + if (provider === "openrouter") { + return { + provider: provider as LLMProvider, + apiKey: settings.openrouter_api_key || "", + model: settings.openrouter_model_name || providerConfig?.defaultModelName || "openai/gpt-4.1-mini", } } return null diff --git a/public/logo/openrouter.svg b/public/logo/openrouter.svg new file mode 100644 index 00000000..e6cca2a8 --- /dev/null +++ b/public/logo/openrouter.svg @@ -0,0 +1 @@ +OpenRouter \ No newline at end of file