Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
134 changes: 69 additions & 65 deletions src/commands/auth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,20 +11,15 @@
* Also runs standalone (no compiled binary required):
* DATABASE_URL=... bun run src/commands/auth.ts create "claude-desktop"
*
* Both paths require DATABASE_URL or GBRAIN_DATABASE_URL (except `test`,
* which only hits the remote URL and doesn't need a local DB).
* DB-backed commands use the active gbrain config, so they work with both
* PGLite and Postgres. Env DATABASE_URL / GBRAIN_DATABASE_URL still works via
* loadConfig() for the direct-script path. `test` only hits a remote URL.
*/
import postgres from 'postgres';
import { createHash, randomBytes } from 'crypto';

function getDatabaseUrl(requireDb: boolean): string | undefined {
const url = process.env.DATABASE_URL || process.env.GBRAIN_DATABASE_URL;
if (!url && requireDb) {
console.error('Set DATABASE_URL or GBRAIN_DATABASE_URL environment variable.');
process.exit(1);
}
return url;
}
import { loadConfig, toEngineConfig } from '../core/config.ts';
import { createEngine } from '../core/engine-factory.ts';
import type { SqlQuery } from '../core/oauth-provider.ts';
import { sqlQueryForEngine } from '../core/sql-query.ts';

function hashToken(token: string): string {
return createHash('sha256').update(token).digest('hex');
Expand All @@ -36,34 +31,32 @@ function generateToken(): string {

async function create(name: string) {
if (!name) { console.error('Usage: auth create <name>'); process.exit(1); }
const sql = postgres(getDatabaseUrl(true)!);
const token = generateToken();
const hash = hashToken(token);

try {
await sql`
INSERT INTO access_tokens (name, token_hash)
VALUES (${name}, ${hash})
`;
console.log(`Token created for "${name}":\n`);
console.log(` ${token}\n`);
console.log('Save this token — it will not be shown again.');
console.log(`Revoke with: bun run src/commands/auth.ts revoke "${name}"`);
await withConfiguredSql(async (sql) => {
await sql`
INSERT INTO access_tokens (name, token_hash)
VALUES (${name}, ${hash})
`;
console.log(`Token created for "${name}":\n`);
console.log(` ${token}\n`);
console.log('Save this token — it will not be shown again.');
console.log(`Revoke with: bun run src/commands/auth.ts revoke "${name}"`);
});
} catch (e: any) {
if (e.code === '23505') {
console.error(`A token named "${name}" already exists. Revoke it first or use a different name.`);
} else {
console.error('Error:', e.message);
}
process.exit(1);
} finally {
await sql.end();
}
}

async function list() {
const sql = postgres(getDatabaseUrl(true)!);
try {
await withConfiguredSql(async (sql) => {
const rows = await sql`
SELECT name, created_at, last_used_at, revoked_at
FROM access_tokens
Expand All @@ -82,27 +75,23 @@ async function list() {
const status = r.revoked_at ? 'REVOKED' : 'active';
console.log(`${name} ${created} ${lastUsed} ${status}`);
}
} finally {
await sql.end();
}
});
}

async function revoke(name: string) {
if (!name) { console.error('Usage: auth revoke <name>'); process.exit(1); }
const sql = postgres(getDatabaseUrl(true)!);
try {
const result = await sql`
await withConfiguredSql(async (sql) => {
const rows = await sql`
UPDATE access_tokens SET revoked_at = now()
WHERE name = ${name} AND revoked_at IS NULL
RETURNING 1
`;
if (result.count === 0) {
if (rows.length === 0) {
console.error(`No active token found with name "${name}".`);
process.exit(1);
}
console.log(`Token "${name}" revoked.`);
} finally {
await sql.end();
}
});
}

async function test(url: string, token: string) {
Expand Down Expand Up @@ -230,26 +219,25 @@ async function revokeClient(clientId: string) {
console.error('Usage: auth revoke-client <client_id>');
process.exit(1);
}
const sql = postgres(getDatabaseUrl(true)!);
try {
// Atomic single-statement delete: no race window between count + delete.
// Postgres cascades to oauth_tokens and oauth_codes (FK ON DELETE CASCADE
// declared in src/schema.sql:370,382) before the transaction commits.
const rows = await sql`
DELETE FROM oauth_clients WHERE client_id = ${clientId}
RETURNING client_id, client_name
`;
if (rows.length === 0) {
console.error(`No client found with id "${clientId}"`);
process.exit(1);
}
console.log(`OAuth client revoked: "${rows[0].client_name}" (${clientId})`);
console.log('Tokens and authorization codes purged via cascade.');
await withConfiguredSql(async (sql) => {
// Atomic single-statement delete: no race window between count + delete.
// Postgres cascades to oauth_tokens and oauth_codes (FK ON DELETE CASCADE
// declared in src/schema.sql:370,382) before the transaction commits.
const rows = await sql`
DELETE FROM oauth_clients WHERE client_id = ${clientId}
RETURNING client_id, client_name
`;
if (rows.length === 0) {
console.error(`No client found with id "${clientId}"`);
process.exit(1);
}
console.log(`OAuth client revoked: "${rows[0].client_name}" (${clientId})`);
console.log('Tokens and authorization codes purged via cascade.');
});
} catch (e: any) {
console.error('Error:', e.message);
process.exit(1);
} finally {
await sql.end();
}
}

Expand All @@ -262,25 +250,41 @@ async function registerClient(name: string, args: string[]) {
: ['client_credentials'];
const scopes = scopesIdx >= 0 && args[scopesIdx + 1] ? args[scopesIdx + 1] : 'read';

const sql = postgres(getDatabaseUrl(true)!);
try {
const { GBrainOAuthProvider } = await import('../core/oauth-provider.ts');
const provider = new GBrainOAuthProvider({ sql: sql as any });
const { clientId, clientSecret } = await provider.registerClientManual(
name, grantTypes, scopes, [],
);
console.log(`OAuth client registered: "${name}"\n`);
console.log(` Client ID: ${clientId}`);
console.log(` Client Secret: ${clientSecret}\n`);
console.log(` Grant types: ${grantTypes.join(', ')}`);
console.log(` Scopes: ${scopes}\n`);
console.log('Save the client secret — it will not be shown again.');
console.log(`Revoke with: gbrain auth revoke-client "${clientId}"`);
await withConfiguredSql(async (sql) => {
const { GBrainOAuthProvider } = await import('../core/oauth-provider.ts');
const provider = new GBrainOAuthProvider({ sql });
const { clientId, clientSecret } = await provider.registerClientManual(
name, grantTypes, scopes, [],
);
console.log(`OAuth client registered: "${name}"\n`);
console.log(` Client ID: ${clientId}`);
console.log(` Client Secret: ${clientSecret}\n`);
console.log(` Grant types: ${grantTypes.join(', ')}`);
console.log(` Scopes: ${scopes}\n`);
console.log('Save the client secret — it will not be shown again.');
console.log(`Revoke with: gbrain auth revoke-client "${clientId}"`);
});
} catch (e: any) {
console.error('Error:', e.message);
process.exit(1);
}
}

async function withConfiguredSql<T>(fn: (sql: SqlQuery) => Promise<T>): Promise<T> {
const config = loadConfig();
if (!config) {
console.error('No brain configured. Run `gbrain init` or set DATABASE_URL / GBRAIN_DATABASE_URL.');
process.exit(1);
}

const engineConfig = toEngineConfig(config);
const engine = await createEngine(engineConfig);
try {
await engine.connect(engineConfig);
return await fn(sqlQueryForEngine(engine));
} finally {
await sql.end();
try { await engine.disconnect(); } catch { /* best-effort */ }
}
}

Expand Down
27 changes: 14 additions & 13 deletions src/commands/serve-http.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import { summarizeMcpParams } from '../mcp/dispatch.ts';
import { loadConfig } from '../core/config.ts';
import { buildError, serializeError } from '../core/errors.ts';
import { VERSION } from '../version.ts';
import * as db from '../core/db.ts';
import { sqlQueryForEngine } from '../core/sql-query.ts';

interface ServeHttpOptions {
port: number;
Expand Down Expand Up @@ -66,8 +66,10 @@ export async function runServeHttp(engine: BrainEngine, options: ServeHttpOption
);
}

// Get raw SQL connection for OAuth provider
const sql = db.getConnection() as SqlQuery;
// Get an engine-aware SQL adapter for OAuth/admin infrastructure. PGLite
// does not populate db.ts's Postgres singleton, so this must route through
// the connected BrainEngine.
const sql = sqlQueryForEngine(engine) as SqlQuery;

// Initialize OAuth provider. F12 cleanup: DCR-disable now flips a
// constructor option instead of monkey-patching `_clientsStore` after
Expand Down Expand Up @@ -478,25 +480,24 @@ export async function runServeHttp(engine: BrainEngine, options: ServeHttpOption
const operation = req.query.operation as string;
const status = req.query.status as string;

// Dynamic filtering via postgres.js tagged-template fragments.
// Each filter expands to either `AND col = $N` (parameterized) or
// an empty fragment. `WHERE 1=1` lets us always have a WHERE clause
// and unconditionally append AND-prefixed fragments — no string
// interpolation, no manual escaping, no sql.unsafe.
const agentFilter = agent && agent !== 'all' ? sql`AND token_name = ${agent}` : sql``;
const opFilter = operation && operation !== 'all' ? sql`AND operation = ${operation}` : sql``;
const statusFilter = status && status !== 'all' ? sql`AND status = ${status}` : sql``;
const agentFilter = agent && agent !== 'all' ? agent : null;
const opFilter = operation && operation !== 'all' ? operation : null;
const statusFilter = status && status !== 'all' ? status : null;

const rows = await sql`
SELECT id, token_name, COALESCE(agent_name, token_name) as agent_name,
operation, latency_ms, status, params, error_message, created_at
FROM mcp_request_log
WHERE 1=1 ${agentFilter} ${opFilter} ${statusFilter}
WHERE (${agentFilter}::text IS NULL OR token_name = ${agentFilter})
AND (${opFilter}::text IS NULL OR operation = ${opFilter})
AND (${statusFilter}::text IS NULL OR status = ${statusFilter})
ORDER BY created_at DESC LIMIT ${limit} OFFSET ${offset}
`;
const [countResult] = await sql`
SELECT count(*)::int as total FROM mcp_request_log
WHERE 1=1 ${agentFilter} ${opFilter} ${statusFilter}
WHERE (${agentFilter}::text IS NULL OR token_name = ${agentFilter})
AND (${opFilter}::text IS NULL OR operation = ${opFilter})
AND (${statusFilter}::text IS NULL OR status = ${statusFilter})
`;
res.json({ rows, total: (countResult as any).total, page, pages: Math.ceil((countResult as any).total / limit) });
} catch {
Expand Down
11 changes: 8 additions & 3 deletions src/core/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,15 +85,20 @@ export function loadConfig(): GBrainConfig | null {

if (!fileConfig && !dbUrl) return null;

// Infer engine type if not explicitly set
const inferredEngine: 'postgres' | 'pglite' = fileConfig?.engine
|| (fileConfig?.database_path ? 'pglite' : 'postgres');
// Infer engine type if not explicitly set. A DATABASE_URL-style env var is
// always a Postgres connection target and must override a file-backed PGLite
// engine selection; otherwise direct-script/operator paths can silently hit
// the local PGLite brain while claiming to use the env URL.
const inferredEngine: 'postgres' | 'pglite' = dbUrl
? 'postgres'
: fileConfig?.engine || (fileConfig?.database_path ? 'pglite' : 'postgres');

// Merge: env vars override config file. READ only — never mutate process.env.
const merged = {
...fileConfig,
engine: inferredEngine,
...(dbUrl ? { database_url: dbUrl } : {}),
...(dbUrl ? { database_path: undefined } : {}),
...(process.env.OPENAI_API_KEY ? { openai_api_key: process.env.OPENAI_API_KEY } : {}),
...(process.env.GBRAIN_EMBEDDING_MODEL ? { embedding_model: process.env.GBRAIN_EMBEDDING_MODEL } : {}),
...(process.env.GBRAIN_EMBEDDING_DIMENSIONS ? { embedding_dimensions: parseInt(process.env.GBRAIN_EMBEDDING_DIMENSIONS, 10) } : {}),
Expand Down
11 changes: 9 additions & 2 deletions src/core/oauth-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,15 @@ import { hashToken, generateToken, isUndefinedColumnError } from './utils.ts';
// Types
// ---------------------------------------------------------------------------

/** Raw SQL query function — works with both PGLite and postgres tagged templates */
export type SqlQuery = (strings: TemplateStringsArray, ...values: unknown[]) => Promise<Record<string, unknown>[]>;
/**
* Minimal tagged SQL function used by OAuth/admin infrastructure.
*
* This is deliberately narrower than postgres.js's `sql` tag: values must be
* scalar bind parameters only. It does not support nested SQL fragments,
* sql.json(), sql.unsafe(), sql.begin(), or direct JS array binding.
*/
export type SqlValue = string | number | bigint | boolean | Date | null;
export type SqlQuery = (strings: TemplateStringsArray, ...values: SqlValue[]) => Promise<Record<string, unknown>[]>;

/**
* Convert a JS array to a PostgreSQL array literal for PGLite compat.
Expand Down
47 changes: 47 additions & 0 deletions src/core/sql-query.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import type { BrainEngine } from './engine.ts';
import type { SqlQuery, SqlValue } from './oauth-provider.ts';

/**
 * Create a minimal tagged-template SQL adapter backed by the active BrainEngine.
 *
 * OAuth/admin infrastructure only needs positional scalar bind parameters and
 * the returned row set. This deliberately is NOT a postgres.js compatibility
 * shim: nested SQL fragments, sql.json, sql.unsafe, sql.begin, and raw JS
 * array binding fall outside the contract and are rejected up front. Routing
 * through BrainEngine.executeRaw keeps both engines working — Postgres uses
 * the connected postgres.js client, PGLite uses its embedded query API.
 */
export function sqlQueryForEngine(engine: BrainEngine): SqlQuery {
  return async (strings: TemplateStringsArray, ...values: SqlValue[]) => {
    // Reject unsupported bind shapes before building any SQL text.
    values.forEach((v) => {
      assertSqlValue(v);
    });
    // Interleave template chunks with $1..$N positional placeholders.
    const pieces: string[] = [];
    strings.forEach((chunk, i) => {
      pieces.push(chunk);
      if (i < values.length) pieces.push(`$${i + 1}`);
    });
    return engine.executeRaw(pieces.join(''), values);
  };
}

/**
 * Narrow `value` to the SqlValue scalar union, or throw a TypeError naming the
 * offending shape (array / promise / typeof) so misuse fails loudly instead of
 * producing a malformed query.
 */
function assertSqlValue(value: unknown): asserts value is SqlValue {
  const scalar =
    value === null ||
    value instanceof Date ||
    ['string', 'number', 'bigint', 'boolean'].includes(typeof value);
  if (scalar) return;

  // Classify the rejected value for a pointed error message.
  let kind: string;
  if (Array.isArray(value)) {
    kind = 'array';
  } else if (value && typeof (value as { then?: unknown }).then === 'function') {
    kind = 'promise';
  } else {
    kind = typeof value;
  }
  throw new TypeError(
    `sqlQueryForEngine only supports scalar bind values; got ${kind}. ` +
    'Use fixed SQL with scalar params, or add an explicit cross-engine helper.',
  );
}
11 changes: 6 additions & 5 deletions src/mcp/http-transport.ts
Original file line number Diff line number Diff line change
Expand Up @@ -114,14 +114,15 @@ function resolveClientIp(req: Request, server: { requestIP: (r: Request) => { ad
export async function startHttpTransport(opts: HttpTransportOptions) {
const { port, engine } = opts;

// Fail-fast: HTTP transport requires Postgres because access_tokens / mcp_request_log
// only exist in the Postgres schema (see src/core/pglite-schema.ts:5-6).
// Legacy HTTP transport requires Postgres because it reaches into the
// postgres.js client directly. The main `gbrain serve --http` path uses
// src/commands/serve-http.ts and is engine-aware, including PGLite.
if ((engine as { kind?: string }).kind !== 'postgres') {
console.error('Error: gbrain serve --http requires a Postgres engine for remote auth tokens.');
console.error('PGLite is local-only by design (access_tokens table is Postgres-only).');
console.error('Error: legacy HTTP transport requires a Postgres engine.');
console.error('Use `gbrain serve --http` / src/commands/serve-http.ts for the engine-aware HTTP server.');
console.error('Either:');
console.error(' - Use stdio: gbrain serve');
console.error(' - Migrate to Postgres: gbrain migrate --to supabase');
console.error(' - Use the current serve-http.ts path for PGLite HTTP');
process.exit(1);
}

Expand Down
Loading