Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -47,3 +47,23 @@ DATABASE_URL="file:./prisma/dev.db"
# ── Import options ──────────────────────────────────────────────────
# Automatically trigger AI categorization after a successful import.
# AUTO_CATEGORIZE_AFTER_IMPORT=true

# ── OpenAI-compatible provider ──────────────────────────────────────
# Use ANY provider with an OpenAI-compatible chat completions API.
# Works with: Ollama, llama.cpp, vLLM, LM Studio, Together AI, Groq,
# Fireworks, Deepseek, Mistral, LocalAI, text-generation-webui, etc.
#
# Base URL (required) — the /v1 endpoint of your provider:
# Ollama: http://localhost:11434/v1
# LM Studio: http://localhost:1234/v1
# llama.cpp: http://localhost:8080/v1
# vLLM: http://localhost:8000/v1
# Together AI: https://api.together.xyz/v1
# Groq: https://api.groq.com/openai/v1
# OPENAI_COMPATIBLE_BASE_URL=http://localhost:11434/v1

# API key (optional — most local servers don't need one)
# OPENAI_COMPATIBLE_API_KEY=

# Model name is configured in the Settings UI (not here) since it
# depends on what's available at your endpoint.
106 changes: 102 additions & 4 deletions app/api/settings/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,26 @@ const ALLOWED_MINIMAX_MODELS = [
'MiniMax-M2.5-highspeed',
] as const

// Providers the POST handler accepts for the `aiProvider` setting; kept as a
// const tuple so the allowlist check below stays in one place.
const ALLOWED_PROVIDERS = ['anthropic', 'openai', 'minimax', 'openai_compatible'] as const

export async function GET(): Promise<NextResponse> {
try {
const [anthropic, anthropicModel, provider, openai, openaiModel, minimax, minimaxModel, xClientId, xClientSecret, obsidianVault] = await Promise.all([
const [
anthropic, anthropicModel, provider, openai, openaiModel,
minimax, minimaxModel,
openaiCompatibleApiKey, openaiCompatibleBaseUrl, openaiCompatibleModel,
xClientId, xClientSecret, obsidianVault,
] = await Promise.all([
prisma.setting.findUnique({ where: { key: 'anthropicApiKey' } }),
prisma.setting.findUnique({ where: { key: 'anthropicModel' } }),
prisma.setting.findUnique({ where: { key: 'aiProvider' } }),
prisma.setting.findUnique({ where: { key: 'openaiApiKey' } }),
prisma.setting.findUnique({ where: { key: 'openaiModel' } }),
prisma.setting.findUnique({ where: { key: 'minimaxApiKey' } }),
prisma.setting.findUnique({ where: { key: 'minimaxModel' } }),
prisma.setting.findUnique({ where: { key: 'openaiCompatibleApiKey' } }),
prisma.setting.findUnique({ where: { key: 'openaiCompatibleBaseUrl' } }),
prisma.setting.findUnique({ where: { key: 'openaiCompatibleModel' } }),
prisma.setting.findUnique({ where: { key: 'x_oauth_client_id' } }),
prisma.setting.findUnique({ where: { key: 'x_oauth_client_secret' } }),
prisma.setting.findUnique({ where: { key: 'obsidianVaultPath' } }),
Expand All @@ -55,6 +65,12 @@ export async function GET(): Promise<NextResponse> {
minimaxApiKey: maskKey(minimax?.value ?? null),
hasMinimaxKey: minimax !== null,
minimaxModel: minimaxModel?.value ?? 'MiniMax-M2.7',
// OpenAI-compatible provider settings
openaiCompatibleApiKey: maskKey(openaiCompatibleApiKey?.value ?? null),
hasOpenaiCompatibleKey: !!openaiCompatibleApiKey?.value,
openaiCompatibleBaseUrl: openaiCompatibleBaseUrl?.value ?? '',
openaiCompatibleModel: openaiCompatibleModel?.value ?? '',
// X OAuth
xOAuthClientId: maskKey(xClientId?.value ?? null),
xOAuthClientSecret: maskKey(xClientSecret?.value ?? null),
hasXOAuth: !!xClientId?.value,
Expand All @@ -78,6 +94,9 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
openaiModel?: string
minimaxApiKey?: string
minimaxModel?: string
openaiCompatibleApiKey?: string
openaiCompatibleBaseUrl?: string
openaiCompatibleModel?: string
xOAuthClientId?: string
xOAuthClientSecret?: string
obsidianVaultPath?: string
Expand All @@ -88,11 +107,15 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
}

const { anthropicApiKey, anthropicModel, provider, openaiApiKey, openaiModel, minimaxApiKey, minimaxModel } = body
const {
anthropicApiKey, anthropicModel, provider, openaiApiKey, openaiModel,
minimaxApiKey, minimaxModel,
openaiCompatibleApiKey, openaiCompatibleBaseUrl, openaiCompatibleModel,
} = body

// Save provider if provided
if (provider !== undefined) {
if (provider !== 'anthropic' && provider !== 'openai' && provider !== 'minimax') {
if (!(ALLOWED_PROVIDERS as readonly string[]).includes(provider)) {
return NextResponse.json({ error: 'Invalid provider' }, { status: 400 })
}
await prisma.setting.upsert({
Expand Down Expand Up @@ -146,6 +169,77 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
return NextResponse.json({ saved: true })
}

// Save OpenAI-compatible model (free-form string, no allowlist)
if (openaiCompatibleModel !== undefined) {
if (typeof openaiCompatibleModel !== 'string') {
return NextResponse.json({ error: 'Invalid openaiCompatibleModel value' }, { status: 400 })
}
const trimmed = openaiCompatibleModel.trim()
if (!trimmed) {
return NextResponse.json({ error: 'Model name cannot be empty' }, { status: 400 })
}
await prisma.setting.upsert({
where: { key: 'openaiCompatibleModel' },
update: { value: trimmed },
create: { key: 'openaiCompatibleModel', value: trimmed },
})
invalidateSettingsCache()
return NextResponse.json({ saved: true })
}

// Save OpenAI-compatible base URL.
//
// The stored URL is later used server-side as the chat-completions endpoint,
// so beyond checking that it parses we also require an http(s) scheme:
// `new URL()` alone happily accepts `file:`, `ftp:`, `javascript:` etc.,
// none of which the OpenAI client can talk to.
if (openaiCompatibleBaseUrl !== undefined) {
  if (typeof openaiCompatibleBaseUrl !== 'string') {
    return NextResponse.json({ error: 'Invalid openaiCompatibleBaseUrl value' }, { status: 400 })
  }
  const trimmed = openaiCompatibleBaseUrl.trim()
  if (!trimmed) {
    return NextResponse.json({ error: 'Base URL cannot be empty' }, { status: 400 })
  }
  // Basic URL validation: must parse, and must be http or https.
  let parsed: URL
  try {
    parsed = new URL(trimmed)
  } catch {
    return NextResponse.json({ error: 'Invalid URL format' }, { status: 400 })
  }
  if (parsed.protocol !== 'http:' && parsed.protocol !== 'https:') {
    return NextResponse.json({ error: 'Base URL must use http or https' }, { status: 400 })
  }
  await prisma.setting.upsert({
    where: { key: 'openaiCompatibleBaseUrl' },
    update: { value: trimmed },
    create: { key: 'openaiCompatibleBaseUrl', value: trimmed },
  })
  invalidateSettingsCache()
  return NextResponse.json({ saved: true })
}

// Save OpenAI-compatible API key.
// NOTE(review): like the other branches above, this returns immediately after
// saving, so a request carrying several settings fields only persists the
// first one matched — presumably the UI submits one field at a time; confirm.
if (openaiCompatibleApiKey !== undefined) {
  if (typeof openaiCompatibleApiKey !== 'string') {
    return NextResponse.json({ error: 'Invalid openaiCompatibleApiKey value' }, { status: 400 })
  }
  const trimmed = openaiCompatibleApiKey.trim()
  // Allow empty string to clear key (some local servers don't need one)
  // deleteMany (not delete) so a missing row is a no-op rather than an error.
  if (trimmed === '') {
    await prisma.setting.deleteMany({ where: { key: 'openaiCompatibleApiKey' } })
    invalidateSettingsCache()
    return NextResponse.json({ saved: true })
  }
  // Unlike the sibling branches, the upsert here is wrapped in try/catch so a
  // database failure surfaces as a 500 with the underlying message.
  try {
    await prisma.setting.upsert({
      where: { key: 'openaiCompatibleApiKey' },
      update: { value: trimmed },
      create: { key: 'openaiCompatibleApiKey', value: trimmed },
    })
    invalidateSettingsCache()
    return NextResponse.json({ saved: true })
  } catch (err) {
    console.error('Settings POST (openai-compatible) error:', err)
    return NextResponse.json(
      { error: `Failed to save: ${err instanceof Error ? err.message : String(err)}` },
      { status: 500 }
    )
  }
}

// Save Anthropic key if provided
if (anthropicApiKey !== undefined) {
if (typeof anthropicApiKey !== 'string' || anthropicApiKey.trim() === '') {
Expand Down Expand Up @@ -272,7 +366,11 @@ export async function DELETE(request: NextRequest): Promise<NextResponse> {
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
}

const allowed = ['anthropicApiKey', 'openaiApiKey', 'minimaxApiKey', 'x_oauth_client_id', 'x_oauth_client_secret']
const allowed = [
'anthropicApiKey', 'openaiApiKey', 'minimaxApiKey',
'openaiCompatibleApiKey', 'openaiCompatibleBaseUrl', 'openaiCompatibleModel',
'x_oauth_client_id', 'x_oauth_client_secret',
]
if (!body.key || !allowed.includes(body.key)) {
return NextResponse.json({ error: 'Invalid key' }, { status: 400 })
}
Expand Down
41 changes: 41 additions & 0 deletions app/api/settings/test/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import prisma from '@/lib/db'
import { resolveAnthropicClient, getCliAuthStatus } from '@/lib/claude-cli-auth'
import { resolveOpenAIClient } from '@/lib/openai-auth'
import { resolveMiniMaxClient } from '@/lib/minimax-auth'
import { resolveOpenAICompatibleClient } from '@/lib/openai-compatible-auth'

export async function POST(request: NextRequest): Promise<NextResponse> {
let body: { provider?: string } = {}
Expand Down Expand Up @@ -106,5 +107,45 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
}
}

// Connectivity test for the OpenAI-compatible provider: reads the configured
// model name, builds a client, and issues a tiny chat completion to prove the
// endpoint, key, and model all work together.
if (provider === 'openai_compatible') {
  // Get the configured model name
  const modelSetting = await prisma.setting.findUnique({ where: { key: 'openaiCompatibleModel' } })
  const modelName = modelSetting?.value?.trim()
  if (!modelName) {
    return NextResponse.json({ working: false, error: 'No model name configured. Set a model name in Settings.' })
  }

  let client
  try {
    client = await resolveOpenAICompatibleClient()
  } catch (e) {
    return NextResponse.json({ working: false, error: e instanceof Error ? e.message : 'Failed to create client' })
  }

  try {
    // Minimal probe request: max_tokens kept tiny so the test is cheap/fast.
    await client.chat.completions.create({
      model: modelName,
      max_tokens: 5,
      messages: [{ role: 'user', content: 'hi' }],
    })
    return NextResponse.json({ working: true })
  } catch (err) {
    // Map common failures to friendly messages. This is heuristic substring
    // matching on the error text (e.g. '401' could in principle appear inside
    // an unrelated message), checked in order of likelihood; anything
    // unrecognized is truncated and passed through.
    const msg = err instanceof Error ? err.message : String(err)
    let friendly: string
    if (msg.includes('ECONNREFUSED') || msg.includes('ENOTFOUND')) {
      friendly = 'Cannot connect to endpoint. Is the server running?'
    } else if (msg.includes('401') || msg.includes('invalid_api_key')) {
      friendly = 'Invalid API key'
    } else if (msg.includes('403')) {
      friendly = 'Key does not have permission'
    } else if (msg.includes('404')) {
      friendly = 'Model not found. Check the model name and endpoint URL.'
    } else {
      friendly = msg.slice(0, 150)
    }
    return NextResponse.json({ working: false, error: friendly })
  }
}

return NextResponse.json({ error: 'Unknown provider' }, { status: 400 })
}
Loading