Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,9 @@ MASTODON_CLIENT_SECRET=""

# Misc Settings
OPENAI_API_KEY=""
OPENAI_BASE_URL="https://openrouter.ai/api/v1" # Optional: Custom OpenAI-compatible base URL (e.g., for Ollama, OpenRouter, or other OpenAI-compatible APIs)
OPENAI_CHAT_MODEL="gpt-4.1" # Optional: Custom model name (defaults to gpt-4.1 if not specified)
OPENAI_IMAGE_MODEL="dall-e-3" # Optional: Custom image model name (defaults to dall-e-3 if not specified)
NEXT_PUBLIC_DISCORD_SUPPORT=""
NEXT_PUBLIC_POLOTNO=""
# NOT_SECURED=false
Expand Down
6 changes: 4 additions & 2 deletions apps/backend/src/api/routes/copilot.controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@ export class CopilotController {
endpoint: '/copilot/chat',
runtime: new CopilotRuntime(),
serviceAdapter: new OpenAIAdapter({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
}),
});

Expand Down Expand Up @@ -97,7 +98,8 @@ export class CopilotController {
runtime,
// properties: req.body.variables.properties,
serviceAdapter: new OpenAIAdapter({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
}),
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,11 @@ import { PostsService } from '@gitroom/nestjs-libraries/database/prisma/posts/po

const model = new ChatOpenAI({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
model: 'gpt-4o-2024-08-06',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4o-2024-08-06',
temperature: 0,
configuration: {
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
},
});

interface WorkflowChannelsState {
Expand Down
8 changes: 6 additions & 2 deletions libraries/nestjs-libraries/src/agent/agent.graph.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,17 @@ const toolNode = new ToolNode(tools);

const model = new ChatOpenAI({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
temperature: 0.7,
configuration: {
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
},
});

const dalle = new DallEAPIWrapper({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
model: 'dall-e-3',
model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
});

interface WorkflowChannelsState {
Expand Down
2 changes: 1 addition & 1 deletion libraries/nestjs-libraries/src/chat/load.tools.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ export class LoadToolsService {
)}
`;
},
model: openai('gpt-4.1'),
model: openai(process.env.OPENAI_CHAT_MODEL || 'gpt-4.1'),
tools,
memory: new Memory({
storage: pStore,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,15 +33,18 @@ interface WorkflowChannelsState {

const model = new ChatOpenAI({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
temperature: 0.7,
configuration: {
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
},
});

const dalle = new DallEAPIWrapper({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
model: 'gpt-image-1',
model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
});

const generateContent = z.object({
socialMediaPostContent: z
.string()
Expand Down
19 changes: 10 additions & 9 deletions libraries/nestjs-libraries/src/openai/openai.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { z } from 'zod';

const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY || 'sk-proj-',
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
});

const PicturePrompt = z.object({
Expand All @@ -23,7 +24,7 @@ export class OpenaiService {
await openai.images.generate({
prompt,
response_format: isUrl ? 'url' : 'b64_json',
model: 'dall-e-3',
model: process.env.OPENAI_IMAGE_MODEL || 'dall-e-3',
...(isVertical ? { size: '1024x1792' } : {}),
})
).data[0];
Expand All @@ -35,7 +36,7 @@ export class OpenaiService {
return (
(
await openai.chat.completions.parse({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
messages: [
{
role: 'system',
Expand All @@ -56,7 +57,7 @@ export class OpenaiService {
return (
(
await openai.chat.completions.parse({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
messages: [
{
role: 'system',
Expand Down Expand Up @@ -90,7 +91,7 @@ export class OpenaiService {
],
n: 5,
temperature: 1,
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
}),
openai.chat.completions.create({
messages: [
Expand All @@ -106,7 +107,7 @@ export class OpenaiService {
],
n: 5,
temperature: 1,
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
}),
])
).flatMap((p) => p.choices);
Expand Down Expand Up @@ -144,7 +145,7 @@ export class OpenaiService {
content,
},
],
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
});

const { content: articleContent } = websiteContent.choices[0].message;
Expand All @@ -164,7 +165,7 @@ export class OpenaiService {
const posts =
(
await openai.chat.completions.parse({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
messages: [
{
role: 'system',
Expand Down Expand Up @@ -197,7 +198,7 @@ export class OpenaiService {
return (
(
await openai.chat.completions.parse({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
messages: [
{
role: 'system',
Expand Down Expand Up @@ -233,7 +234,7 @@ export class OpenaiService {
const parse =
(
await openai.chat.completions.parse({
model: 'gpt-4.1',
model: process.env.OPENAI_CHAT_MODEL || 'gpt-4.1',
messages: [
{
role: 'system',
Expand Down
Loading