From 810775238dc1914c236734caef001a995dc5aaab Mon Sep 17 00:00:00 2001 From: Raju Bepary Date: Mon, 24 Nov 2025 09:43:52 +0600 Subject: [PATCH] feat: add support for custom OpenAI configurations --- .env.example | 3 +++ .../src/api/routes/copilot.controller.ts | 6 ++++-- .../src/agent/agent.graph.insert.service.ts | 5 ++++- .../src/agent/agent.graph.service.ts | 8 ++++++-- .../src/chat/load.tools.service.ts | 2 +- .../prisma/autopost/autopost.service.ts | 9 ++++++--- .../src/openai/openai.service.ts | 19 ++++++++++--------- 7 files changed, 34 insertions(+), 18 deletions(-) diff --git a/.env.example b/.env.example index 61d2a0206..fe378f481 100644 --- a/.env.example +++ b/.env.example @@ -78,6 +78,9 @@ MASTODON_CLIENT_SECRET="" # Misc Settings OPENAI_API_KEY="" +OPENAI_BASE_URL="https://openrouter.ai/api/v1" # Optional: Custom OpenAI-compatible base URL (e.g., for Ollama, OpenRouter, or other OpenAI-compatible APIs) +OPENAI_CHAT_MODEL="gpt-4.1" # Optional: Custom chat model name (defaults to gpt-4.1 if not specified) +OPENAI_IMAGE_MODEL="dall-e-3" # Optional: Custom image model name (defaults to dall-e-3 if not specified) NEXT_PUBLIC_DISCORD_SUPPORT="" NEXT_PUBLIC_POLOTNO="" # NOT_SECURED=false diff --git a/apps/backend/src/api/routes/copilot.controller.ts b/apps/backend/src/api/routes/copilot.controller.ts index ffd61391f..7cf4bc898 100644 --- a/apps/backend/src/api/routes/copilot.controller.ts +++ b/apps/backend/src/api/routes/copilot.controller.ts @@ -50,7 +50,8 @@ export class CopilotController { endpoint: '/copilot/chat', runtime: new CopilotRuntime(), serviceAdapter: new OpenAIAdapter({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', }), }); @@ -97,7 +98,8 @@ export class CopilotController { runtime, // properties: req.body.variables.properties, serviceAdapter: new OpenAIAdapter({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', }), }); diff --git 
a/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts b/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts index 1a60b7399..9305df37c 100644 --- a/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts +++ b/libraries/nestjs-libraries/src/agent/agent.graph.insert.service.ts @@ -10,8 +10,11 @@ import { PostsService } from '@gitroom/nestjs-libraries/database/prisma/posts/po const model = new ChatOpenAI({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4o-2024-08-06', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4o-2024-08-06', temperature: 0, + configuration: { + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', + }, }); interface WorkflowChannelsState { diff --git a/libraries/nestjs-libraries/src/agent/agent.graph.service.ts b/libraries/nestjs-libraries/src/agent/agent.graph.service.ts index d4697e377..36ac3e1d8 100644 --- a/libraries/nestjs-libraries/src/agent/agent.graph.service.ts +++ b/libraries/nestjs-libraries/src/agent/agent.graph.service.ts @@ -23,13 +23,17 @@ const toolNode = new ToolNode(tools); const model = new ChatOpenAI({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', temperature: 0.7, + configuration: { + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', + }, }); const dalle = new DallEAPIWrapper({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'dall-e-3', + model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3', + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', }); interface WorkflowChannelsState { diff --git a/libraries/nestjs-libraries/src/chat/load.tools.service.ts b/libraries/nestjs-libraries/src/chat/load.tools.service.ts index 15679f4b2..7e0760b87 100644 --- a/libraries/nestjs-libraries/src/chat/load.tools.service.ts +++ b/libraries/nestjs-libraries/src/chat/load.tools.service.ts @@ -85,7 +85,7 @@ export class LoadToolsService { )} `; }, - 
model: openai('gpt-4.1'), + model: openai(process.env.OPENAI_CHAT_MODEL || 'gpt-4.1'), tools, memory: new Memory({ storage: pStore, diff --git a/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts b/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts index 0b3c66d25..8d3b91b83 100644 --- a/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts +++ b/libraries/nestjs-libraries/src/database/prisma/autopost/autopost.service.ts @@ -33,15 +33,18 @@ interface WorkflowChannelsState { const model = new ChatOpenAI({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', temperature: 0.7, + configuration: { + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', + }, }); const dalle = new DallEAPIWrapper({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', - model: 'gpt-image-1', + model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3', + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', }); - const generateContent = z.object({ socialMediaPostContent: z .string() diff --git a/libraries/nestjs-libraries/src/openai/openai.service.ts b/libraries/nestjs-libraries/src/openai/openai.service.ts index 7e4778921..05e2c10e3 100644 --- a/libraries/nestjs-libraries/src/openai/openai.service.ts +++ b/libraries/nestjs-libraries/src/openai/openai.service.ts @@ -6,6 +6,7 @@ import { z } from 'zod'; const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY || 'sk-proj-', + baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1', }); const PicturePrompt = z.object({ @@ -23,7 +24,7 @@ export class OpenaiService { await openai.images.generate({ prompt, response_format: isUrl ? 'url' : 'b64_json', - model: 'dall-e-3', + model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3', ...(isVertical ? 
{ size: '1024x1792' } : {}), }) ).data[0]; @@ -35,7 +36,7 @@ export class OpenaiService { return ( ( await openai.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -56,7 +57,7 @@ export class OpenaiService { return ( ( await openai.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -90,7 +91,7 @@ export class OpenaiService { ], n: 5, temperature: 1, - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', }), openai.chat.completions.create({ messages: [ @@ -106,7 +107,7 @@ export class OpenaiService { ], n: 5, temperature: 1, - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', }), ]) ).flatMap((p) => p.choices); @@ -144,7 +145,7 @@ export class OpenaiService { content, }, ], - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', }); const { content: articleContent } = websiteContent.choices[0].message; @@ -164,7 +165,7 @@ export class OpenaiService { const posts = ( await openai.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -197,7 +198,7 @@ export class OpenaiService { return ( ( await openai.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', messages: [ { role: 'system', @@ -233,7 +234,7 @@ export class OpenaiService { const parse = ( await openai.chat.completions.parse({ - model: 'gpt-4.1', + model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1', messages: [ { role: 'system',