diff --git a/src/commands/auth.ts b/src/commands/auth.ts index e025455c2..612fa4fe1 100644 --- a/src/commands/auth.ts +++ b/src/commands/auth.ts @@ -11,20 +11,15 @@ * Also runs standalone (no compiled binary required): * DATABASE_URL=... bun run src/commands/auth.ts create "claude-desktop" * - * Both paths require DATABASE_URL or GBRAIN_DATABASE_URL (except `test`, - * which only hits the remote URL and doesn't need a local DB). + * DB-backed commands use the active gbrain config, so they work with both + * PGLite and Postgres. Env DATABASE_URL / GBRAIN_DATABASE_URL still works via + * loadConfig() for the direct-script path. `test` only hits a remote URL. */ -import postgres from 'postgres'; import { createHash, randomBytes } from 'crypto'; - -function getDatabaseUrl(requireDb: boolean): string | undefined { - const url = process.env.DATABASE_URL || process.env.GBRAIN_DATABASE_URL; - if (!url && requireDb) { - console.error('Set DATABASE_URL or GBRAIN_DATABASE_URL environment variable.'); - process.exit(1); - } - return url; -} +import { loadConfig, toEngineConfig } from '../core/config.ts'; +import { createEngine } from '../core/engine-factory.ts'; +import type { SqlQuery } from '../core/oauth-provider.ts'; +import { sqlQueryForEngine } from '../core/sql-query.ts'; function hashToken(token: string): string { return createHash('sha256').update(token).digest('hex'); } @@ -36,19 +31,20 @@ function generateToken(): string { async function create(name: string) { if (!name) { console.error('Usage: auth create <name>'); process.exit(1); } - const sql = postgres(getDatabaseUrl(true)!); const token = generateToken(); const hash = hashToken(token); try { - await sql` - INSERT INTO access_tokens (name, token_hash) - VALUES (${name}, ${hash}) - `; - console.log(`Token created for "${name}":\n`); - console.log(` ${token}\n`); - console.log('Save this token — it will not be shown again.'); - console.log(`Revoke with: bun run src/commands/auth.ts revoke "${name}"`); + await 
withConfiguredSql(async (sql) => { + await sql` + INSERT INTO access_tokens (name, token_hash) + VALUES (${name}, ${hash}) + `; + console.log(`Token created for "${name}":\n`); + console.log(` ${token}\n`); + console.log('Save this token — it will not be shown again.'); + console.log(`Revoke with: bun run src/commands/auth.ts revoke "${name}"`); + }); } catch (e: any) { if (e.code === '23505') { console.error(`A token named "${name}" already exists. Revoke it first or use a different name.`); @@ -56,14 +52,11 @@ async function create(name: string) { console.error('Error:', e.message); } process.exit(1); - } finally { - await sql.end(); } } async function list() { - const sql = postgres(getDatabaseUrl(true)!); - try { + await withConfiguredSql(async (sql) => { const rows = await sql` SELECT name, created_at, last_used_at, revoked_at FROM access_tokens @@ -82,27 +75,23 @@ async function list() { const status = r.revoked_at ? 'REVOKED' : 'active'; console.log(`${name} ${created} ${lastUsed} ${status}`); } - } finally { - await sql.end(); - } + }); } async function revoke(name: string) { if (!name) { console.error('Usage: auth revoke <name>'); process.exit(1); } - const sql = postgres(getDatabaseUrl(true)!); - try { - const result = await sql` + await withConfiguredSql(async (sql) => { + const rows = await sql` UPDATE access_tokens SET revoked_at = now() WHERE name = ${name} AND revoked_at IS NULL + RETURNING 1 `; - if (result.count === 0) { + if (rows.length === 0) { console.error(`No active token found with name "${name}".`); process.exit(1); } console.log(`Token "${name}" revoked.`); - } finally { - await sql.end(); - } + }); } async function test(url: string, token: string) { @@ -230,26 +219,25 @@ async function revokeClient(clientId: string) { console.error('Usage: auth revoke-client <client-id>'); process.exit(1); } - const sql = postgres(getDatabaseUrl(true)!); try { - // Atomic single-statement delete: no race window between count + delete. 
- // Postgres cascades to oauth_tokens and oauth_codes (FK ON DELETE CASCADE - // declared in src/schema.sql:370,382) before the transaction commits. - const rows = await sql` - DELETE FROM oauth_clients WHERE client_id = ${clientId} - RETURNING client_id, client_name - `; - if (rows.length === 0) { - console.error(`No client found with id "${clientId}"`); - process.exit(1); - } - console.log(`OAuth client revoked: "${rows[0].client_name}" (${clientId})`); - console.log('Tokens and authorization codes purged via cascade.'); + await withConfiguredSql(async (sql) => { + // Atomic single-statement delete: no race window between count + delete. + // Postgres cascades to oauth_tokens and oauth_codes (FK ON DELETE CASCADE + // declared in src/schema.sql:370,382) before the transaction commits. + const rows = await sql` + DELETE FROM oauth_clients WHERE client_id = ${clientId} + RETURNING client_id, client_name + `; + if (rows.length === 0) { + console.error(`No client found with id "${clientId}"`); + process.exit(1); + } + console.log(`OAuth client revoked: "${rows[0].client_name}" (${clientId})`); + console.log('Tokens and authorization codes purged via cascade.'); + }); } catch (e: any) { console.error('Error:', e.message); process.exit(1); - } finally { - await sql.end(); } } @@ -262,25 +250,41 @@ async function registerClient(name: string, args: string[]) { : ['client_credentials']; const scopes = scopesIdx >= 0 && args[scopesIdx + 1] ? 
args[scopesIdx + 1] : 'read'; - const sql = postgres(getDatabaseUrl(true)!); try { - const { GBrainOAuthProvider } = await import('../core/oauth-provider.ts'); - const provider = new GBrainOAuthProvider({ sql: sql as any }); - const { clientId, clientSecret } = await provider.registerClientManual( - name, grantTypes, scopes, [], - ); - console.log(`OAuth client registered: "${name}"\n`); - console.log(` Client ID: ${clientId}`); - console.log(` Client Secret: ${clientSecret}\n`); - console.log(` Grant types: ${grantTypes.join(', ')}`); - console.log(` Scopes: ${scopes}\n`); - console.log('Save the client secret — it will not be shown again.'); - console.log(`Revoke with: gbrain auth revoke-client "${clientId}"`); + await withConfiguredSql(async (sql) => { + const { GBrainOAuthProvider } = await import('../core/oauth-provider.ts'); + const provider = new GBrainOAuthProvider({ sql }); + const { clientId, clientSecret } = await provider.registerClientManual( + name, grantTypes, scopes, [], + ); + console.log(`OAuth client registered: "${name}"\n`); + console.log(` Client ID: ${clientId}`); + console.log(` Client Secret: ${clientSecret}\n`); + console.log(` Grant types: ${grantTypes.join(', ')}`); + console.log(` Scopes: ${scopes}\n`); + console.log('Save the client secret — it will not be shown again.'); + console.log(`Revoke with: gbrain auth revoke-client "${clientId}"`); + }); } catch (e: any) { console.error('Error:', e.message); process.exit(1); + } +} + +async function withConfiguredSql(fn: (sql: SqlQuery) => Promise<void>): Promise<void> { + const config = loadConfig(); + if (!config) { + console.error('No brain configured. 
Run `gbrain init` or set DATABASE_URL / GBRAIN_DATABASE_URL.'); + process.exit(1); + } + + const engineConfig = toEngineConfig(config); + const engine = await createEngine(engineConfig); + try { + await engine.connect(engineConfig); + return await fn(sqlQueryForEngine(engine)); } finally { - await sql.end(); + try { await engine.disconnect(); } catch { /* best-effort */ } } } diff --git a/src/commands/serve-http.ts b/src/commands/serve-http.ts index 87e7b505b..7993436f4 100644 --- a/src/commands/serve-http.ts +++ b/src/commands/serve-http.ts @@ -30,7 +30,7 @@ import { summarizeMcpParams } from '../mcp/dispatch.ts'; import { loadConfig } from '../core/config.ts'; import { buildError, serializeError } from '../core/errors.ts'; import { VERSION } from '../version.ts'; -import * as db from '../core/db.ts'; +import { sqlQueryForEngine } from '../core/sql-query.ts'; interface ServeHttpOptions { port: number; @@ -66,8 +66,10 @@ export async function runServeHttp(engine: BrainEngine, options: ServeHttpOption ); } - // Get raw SQL connection for OAuth provider - const sql = db.getConnection() as SqlQuery; + // Get an engine-aware SQL adapter for OAuth/admin infrastructure. PGLite + // does not populate db.ts's Postgres singleton, so this must route through + // the connected BrainEngine. + const sql = sqlQueryForEngine(engine) as SqlQuery; // Initialize OAuth provider. F12 cleanup: DCR-disable now flips a // constructor option instead of monkey-patching `_clientsStore` after @@ -478,25 +480,24 @@ export async function runServeHttp(engine: BrainEngine, options: ServeHttpOption const operation = req.query.operation as string; const status = req.query.status as string; - // Dynamic filtering via postgres.js tagged-template fragments. - // Each filter expands to either `AND col = $N` (parameterized) or - // an empty fragment. 
`WHERE 1=1` lets us always have a WHERE clause - // and unconditionally append AND-prefixed fragments — no string - // interpolation, no manual escaping, no sql.unsafe. - const agentFilter = agent && agent !== 'all' ? sql`AND token_name = ${agent}` : sql``; - const opFilter = operation && operation !== 'all' ? sql`AND operation = ${operation}` : sql``; - const statusFilter = status && status !== 'all' ? sql`AND status = ${status}` : sql``; + const agentFilter = agent && agent !== 'all' ? agent : null; + const opFilter = operation && operation !== 'all' ? operation : null; + const statusFilter = status && status !== 'all' ? status : null; const rows = await sql` SELECT id, token_name, COALESCE(agent_name, token_name) as agent_name, operation, latency_ms, status, params, error_message, created_at FROM mcp_request_log - WHERE 1=1 ${agentFilter} ${opFilter} ${statusFilter} + WHERE (${agentFilter}::text IS NULL OR token_name = ${agentFilter}) + AND (${opFilter}::text IS NULL OR operation = ${opFilter}) + AND (${statusFilter}::text IS NULL OR status = ${statusFilter}) ORDER BY created_at DESC LIMIT ${limit} OFFSET ${offset} `; const [countResult] = await sql` SELECT count(*)::int as total FROM mcp_request_log - WHERE 1=1 ${agentFilter} ${opFilter} ${statusFilter} + WHERE (${agentFilter}::text IS NULL OR token_name = ${agentFilter}) + AND (${opFilter}::text IS NULL OR operation = ${opFilter}) + AND (${statusFilter}::text IS NULL OR status = ${statusFilter}) `; res.json({ rows, total: (countResult as any).total, page, pages: Math.ceil((countResult as any).total / limit) }); } catch { diff --git a/src/core/config.ts b/src/core/config.ts index 6092e6228..5e56a5505 100644 --- a/src/core/config.ts +++ b/src/core/config.ts @@ -85,15 +85,20 @@ export function loadConfig(): GBrainConfig | null { if (!fileConfig && !dbUrl) return null; - // Infer engine type if not explicitly set - const inferredEngine: 'postgres' | 'pglite' = fileConfig?.engine - || (fileConfig?.database_path ? 
'pglite' : 'postgres'); + // Infer engine type if not explicitly set. A DATABASE_URL-style env var is + // always a Postgres connection target and must override a file-backed PGLite + // engine selection; otherwise direct-script/operator paths can silently hit + // the local PGLite brain while claiming to use the env URL. + const inferredEngine: 'postgres' | 'pglite' = dbUrl + ? 'postgres' + : fileConfig?.engine || (fileConfig?.database_path ? 'pglite' : 'postgres'); // Merge: env vars override config file. READ only — never mutate process.env. const merged = { ...fileConfig, engine: inferredEngine, ...(dbUrl ? { database_url: dbUrl } : {}), + ...(dbUrl ? { database_path: undefined } : {}), ...(process.env.OPENAI_API_KEY ? { openai_api_key: process.env.OPENAI_API_KEY } : {}), ...(process.env.GBRAIN_EMBEDDING_MODEL ? { embedding_model: process.env.GBRAIN_EMBEDDING_MODEL } : {}), ...(process.env.GBRAIN_EMBEDDING_DIMENSIONS ? { embedding_dimensions: parseInt(process.env.GBRAIN_EMBEDDING_DIMENSIONS, 10) } : {}), diff --git a/src/core/oauth-provider.ts b/src/core/oauth-provider.ts index 5749b4ba4..d088951fb 100644 --- a/src/core/oauth-provider.ts +++ b/src/core/oauth-provider.ts @@ -28,8 +28,15 @@ import { hashToken, generateToken, isUndefinedColumnError } from './utils.ts'; // Types // --------------------------------------------------------------------------- -/** Raw SQL query function — works with both PGLite and postgres tagged templates */ -export type SqlQuery = (strings: TemplateStringsArray, ...values: unknown[]) => Promise<Record<string, unknown>[]>; +/** + * Minimal tagged SQL function used by OAuth/admin infrastructure. + * + * This is deliberately narrower than postgres.js's `sql` tag: values must be + * scalar bind parameters only. It does not support nested SQL fragments, + * sql.json(), sql.unsafe(), sql.begin(), or direct JS array binding. 
+ */ +export type SqlValue = string | number | bigint | boolean | Date | null; +export type SqlQuery = (strings: TemplateStringsArray, ...values: SqlValue[]) => Promise<Record<string, unknown>[]>; /** * Convert a JS array to a PostgreSQL array literal for PGLite compat. diff --git a/src/core/sql-query.ts b/src/core/sql-query.ts new file mode 100644 index 000000000..d40e4b9e5 --- /dev/null +++ b/src/core/sql-query.ts @@ -0,0 +1,47 @@ +import type { BrainEngine } from './engine.ts'; +import type { SqlQuery, SqlValue } from './oauth-provider.ts'; + +/** + * Build a minimal tagged-template SQL adapter over the active BrainEngine. + * + * OAuth/admin code only needs scalar positional parameters plus returned rows. + * This is not a postgres.js compatibility layer: nested fragments, sql.json, + * sql.unsafe, sql.begin, and direct JS array binding are intentionally outside + * the contract. Using BrainEngine.executeRaw keeps the path engine-aware: + * Postgres goes through the connected postgres.js client, while PGLite goes + * through its embedded query API. + */ +export function sqlQueryForEngine(engine: BrainEngine): SqlQuery { + return async (strings: TemplateStringsArray, ...values: SqlValue[]) => { + for (const value of values) { + assertSqlValue(value); + } + const query = strings.reduce((acc, str, i) => { + return acc + str + (i < values.length ? `$${i + 1}` : ''); + }, ''); + return engine.executeRaw(query, values); + }; +} + +function assertSqlValue(value: unknown): asserts value is SqlValue { + if ( + value === null || + typeof value === 'string' || + typeof value === 'number' || + typeof value === 'bigint' || + typeof value === 'boolean' || + value instanceof Date + ) { + return; + } + + const kind = Array.isArray(value) + ? 'array' + : value && typeof (value as { then?: unknown }).then === 'function' + ? 'promise' + : typeof value; + throw new TypeError( + `sqlQueryForEngine only supports scalar bind values; got ${kind}. 
` + + 'Use fixed SQL with scalar params, or add an explicit cross-engine helper.', + ); +} diff --git a/src/mcp/http-transport.ts b/src/mcp/http-transport.ts index 36c4cfd23..761004476 100644 --- a/src/mcp/http-transport.ts +++ b/src/mcp/http-transport.ts @@ -114,14 +114,15 @@ function resolveClientIp(req: Request, server: { requestIP: (r: Request) => { ad export async function startHttpTransport(opts: HttpTransportOptions) { const { port, engine } = opts; - // Fail-fast: HTTP transport requires Postgres because access_tokens / mcp_request_log - // only exist in the Postgres schema (see src/core/pglite-schema.ts:5-6). + // Legacy HTTP transport requires Postgres because it reaches into the + // postgres.js client directly. The main `gbrain serve --http` path uses + // src/commands/serve-http.ts and is engine-aware, including PGLite. if ((engine as { kind?: string }).kind !== 'postgres') { - console.error('Error: gbrain serve --http requires a Postgres engine for remote auth tokens.'); - console.error('PGLite is local-only by design (access_tokens table is Postgres-only).'); + console.error('Error: legacy HTTP transport requires a Postgres engine.'); + console.error('Use `gbrain serve --http` / src/commands/serve-http.ts for the engine-aware HTTP server.'); console.error('Either:'); console.error(' - Use stdio: gbrain serve'); - console.error(' - Migrate to Postgres: gbrain migrate --to supabase'); + console.error(' - Use the current serve-http.ts path for PGLite HTTP'); process.exit(1); } diff --git a/test/config-env.test.ts b/test/config-env.test.ts new file mode 100644 index 000000000..413bb4065 --- /dev/null +++ b/test/config-env.test.ts @@ -0,0 +1,43 @@ +import { mkdtempSync, rmSync } from 'fs'; +import { tmpdir } from 'os'; +import { join } from 'path'; +import { afterEach, describe, expect, test } from 'bun:test'; +import { loadConfig, saveConfig } from '../src/core/config.ts'; + +const ORIG_GBRAIN_HOME = process.env.GBRAIN_HOME; +const ORIG_DATABASE_URL = 
process.env.DATABASE_URL; +const ORIG_GBRAIN_DATABASE_URL = process.env.GBRAIN_DATABASE_URL; + +function restoreEnv() { + if (ORIG_GBRAIN_HOME === undefined) delete process.env.GBRAIN_HOME; + else process.env.GBRAIN_HOME = ORIG_GBRAIN_HOME; + if (ORIG_DATABASE_URL === undefined) delete process.env.DATABASE_URL; + else process.env.DATABASE_URL = ORIG_DATABASE_URL; + if (ORIG_GBRAIN_DATABASE_URL === undefined) delete process.env.GBRAIN_DATABASE_URL; + else process.env.GBRAIN_DATABASE_URL = ORIG_GBRAIN_DATABASE_URL; +} + +afterEach(() => { + restoreEnv(); +}); + +describe('loadConfig env database URL precedence', () => { + test('DATABASE_URL switches an existing PGLite file config to Postgres', () => { + const home = mkdtempSync(join(tmpdir(), 'gbrain-config-env-')); + process.env.GBRAIN_HOME = home; + delete process.env.GBRAIN_DATABASE_URL; + delete process.env.DATABASE_URL; + + try { + saveConfig({ engine: 'pglite', database_path: '/tmp/local-brain.pglite' }); + process.env.DATABASE_URL = 'postgres://user:pass@example.test:5432/gbrain'; + + const cfg = loadConfig(); + expect(cfg?.engine).toBe('postgres'); + expect(cfg?.database_url).toBe('postgres://user:pass@example.test:5432/gbrain'); + expect(cfg?.database_path).toBeUndefined(); + } finally { + rmSync(home, { recursive: true, force: true }); + } + }); +}); diff --git a/test/sql-query.test.ts b/test/sql-query.test.ts new file mode 100644 index 000000000..e38d0656b --- /dev/null +++ b/test/sql-query.test.ts @@ -0,0 +1,32 @@ +import { afterAll, beforeAll, describe, expect, test } from 'bun:test'; +import { PGLiteEngine } from '../src/core/pglite-engine.ts'; +import { sqlQueryForEngine } from '../src/core/sql-query.ts'; + +let engine: PGLiteEngine; + +beforeAll(async () => { + engine = new PGLiteEngine(); + await engine.connect({}); +}, 30_000); + +afterAll(async () => { + if (engine) await engine.disconnect(); +}); + +describe('sqlQueryForEngine', () => { + test('runs parameterized tagged-template SQL against 
PGLite', async () => { + const sql = sqlQueryForEngine(engine); + const rows = await sql`SELECT ${'pglite'}::text AS engine, ${3}::int AS count`; + expect(rows).toEqual([{ engine: 'pglite', count: 3 }]); + }); + + test('rejects postgres.js-style fragment/object values explicitly', async () => { + const sql = sqlQueryForEngine(engine); + await expect( + sql`SELECT ${(Promise.resolve([]) as any)}::text AS bad` + ).rejects.toThrow(/only supports scalar bind values/); + await expect( + sql`SELECT ${(['read', 'write'] as any)}::text[] AS bad` + ).rejects.toThrow(/only supports scalar bind values/); + }); +});