12 changes: 5 additions & 7 deletions src/commands/doctor.ts
@@ -3,7 +3,7 @@ import * as db from '../core/db.ts';
import { LATEST_VERSION, getIdleBlockers } from '../core/migrate.ts';
import { checkResolvable } from '../core/check-resolvable.ts';
import { autoFixDryViolations, type AutoFixReport, type FixOutcome } from '../core/dry-fix.ts';
import { findRepoRoot } from '../core/repo-root.ts';
import { autoDetectSkillsDir } from '../core/repo-root.ts';
import { loadCompletedMigrations } from '../core/preferences.ts';
import { compareVersions } from './migrations/index.ts';
import { createProgress, startHeartbeat, type ProgressReporter } from '../core/progress.ts';
@@ -228,10 +228,9 @@ export async function runDoctor(engine: BrainEngine | null, args: string[], dbSo
// --- Filesystem checks (always run, no DB needed) ---

// 1. Resolver health
const repoRoot = findRepoRoot();
if (repoRoot) {
const skillsDir = join(repoRoot, 'skills');

const skillsDetection = autoDetectSkillsDir();
const skillsDir = skillsDetection.dir;
if (skillsDir) {
// --fix: run auto-repair BEFORE checkResolvable so the post-fix scan
// reflects the new state. Auto-fix only targets DRY violations today;
// other resolver issues are left to human repair.
@@ -268,8 +267,7 @@ }
}

// 2. Skill conformance
if (repoRoot) {
const skillsDir = join(repoRoot, 'skills');
if (skillsDir) {
const conformanceResult = checkSkillConformance(skillsDir);
checks.push(conformanceResult);
}
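Side note on the new helper: the diff doesn't include `autoDetectSkillsDir` itself, only its call site, which implies it returns an object whose `dir` property is the resolved skills directory (or a falsy value when none is found). A minimal sketch of what such a detector could look like; the `source` field and the fallback order are assumptions, not the real `repo-root.ts` code:

```ts
// Hypothetical sketch only: the real autoDetectSkillsDir in src/core/repo-root.ts
// may behave differently. Only `dir` is implied by the doctor.ts call site above.
import { existsSync } from 'node:fs';
import { join } from 'node:path';
import { findRepoRoot } from '../core/repo-root.ts'; // existing helper doctor.ts previously imported

interface SkillsDirDetection {
  dir: string | null;                    // resolved skills directory, or null
  source: 'repo-root' | 'cwd' | 'none';  // assumed field: where it was found
}

function autoDetectSkillsDirSketch(): SkillsDirDetection {
  // Prefer <repo root>/skills when we are inside a repository.
  const repoRoot = findRepoRoot();
  if (repoRoot) {
    const candidate = join(repoRoot, 'skills');
    if (existsSync(candidate)) return { dir: candidate, source: 'repo-root' };
  }
  // Otherwise fall back to ./skills under the current working directory.
  const cwdCandidate = join(process.cwd(), 'skills');
  if (existsSync(cwdCandidate)) return { dir: cwdCandidate, source: 'cwd' };
  return { dir: null, source: 'none' };
}
```

Whatever the real detection order is, the win at the call site is that doctor no longer hard-codes `join(repoRoot, 'skills')` in two places and can report a skills directory even when the repo-root heuristic alone would not find one.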
61 changes: 37 additions & 24 deletions src/commands/migrations/v0_29_1.ts
@@ -48,26 +48,32 @@ async function phaseBBackfill(opts: OrchestratorOpts): Promise<OrchestratorPhase
const { backfillEffectiveDate } = await import('../../core/backfill-effective-date.ts');
const cfg = loadConfig();
if (!cfg) throw new Error('No gbrain config; run `gbrain init` first.');
const engine = await createEngine(toEngineConfig(cfg));
const engineConfig = toEngineConfig(cfg);
const engine = await createEngine(engineConfig);
await engine.connect(engineConfig);

let totalExamined = 0;
let totalUpdated = 0;

const result = await backfillEffectiveDate(engine, {
onBatch: ({ batch, lastId, rowsTouched, cumulative }) => {
totalExamined = cumulative;
totalUpdated += rowsTouched;
if (batch % 10 === 0) {
process.stderr.write(` [backfill] batch ${batch} | last_id=${lastId} | examined=${cumulative} | updated_so_far=${totalUpdated}\n`);
}
},
});
try {
const result = await backfillEffectiveDate(engine, {
onBatch: ({ batch, lastId, rowsTouched, cumulative }) => {
totalExamined = cumulative;
totalUpdated += rowsTouched;
if (batch % 10 === 0) {
process.stderr.write(` [backfill] batch ${batch} | last_id=${lastId} | examined=${cumulative} | updated_so_far=${totalUpdated}\n`);
}
},
});

return {
name: 'backfill_effective_date',
status: 'complete',
detail: `examined=${result.examined} updated=${result.updated} fallback=${result.fallback} dur=${result.durationSec.toFixed(1)}s`,
};
return {
name: 'backfill_effective_date',
status: 'complete',
detail: `examined=${result.examined} updated=${result.updated} fallback=${result.fallback} dur=${result.durationSec.toFixed(1)}s`,
};
} finally {
await engine.disconnect();
}
} catch (e) {
return { name: 'backfill_effective_date', status: 'failed', detail: e instanceof Error ? e.message : String(e) };
}
@@ -82,15 +88,22 @@ async function phaseCVerify(opts: OrchestratorOpts): Promise<OrchestratorPhaseRe
const { loadConfig, toEngineConfig } = await import('../../core/config.ts');
const cfg = loadConfig();
if (!cfg) throw new Error('No gbrain config; run `gbrain init` first.');
const engine = await createEngine(toEngineConfig(cfg));
// Count rows where effective_date is still NULL but frontmatter HAS a
// parseable date — those are the rows the backfill should have touched
// but didn't. (Rows that fall through to 'fallback' have non-null
// effective_date already; this catches genuine misses.)
const rows = await engine.executeRaw<{ count: string }>(
`SELECT COUNT(*)::text AS count FROM pages WHERE effective_date IS NULL`,
);
const remaining = Number(rows[0]?.count ?? 0);
const engineConfig = toEngineConfig(cfg);
const engine = await createEngine(engineConfig);
await engine.connect(engineConfig);
let remaining = 0;
try {
// Count rows where effective_date is still NULL but frontmatter HAS a
// parseable date — those are the rows the backfill should have touched
// but didn't. (Rows that fall through to 'fallback' have non-null
// effective_date already; this catches genuine misses.)
const rows = await engine.executeRaw<{ count: string }>(
`SELECT COUNT(*)::text AS count FROM pages WHERE effective_date IS NULL`,
);
remaining = Number(rows[0]?.count ?? 0);
} finally {
await engine.disconnect();
}
if (remaining > 0) {
return {
name: 'verify',
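Both phases now share the same lifecycle: build the engine config once, `createEngine`, explicitly `connect`, and guarantee `disconnect` in a `finally` block so a thrown backfill or verify error cannot leak a connection. Condensed into one hedged sketch (the `withEngine` helper and the `createEngine` import path are illustrative, not part of the PR; the `connect`, `disconnect`, and `executeRaw` calls follow the signatures visible in this diff):

```ts
// Sketch of the connect / try / finally / disconnect pattern both migration
// phases now follow. withEngine is a hypothetical helper for illustration.
import { loadConfig, toEngineConfig } from '../../core/config.ts';
import { createEngine, type BrainEngine } from '../../core/engine.ts'; // assumed import path

async function withEngine<T>(work: (engine: BrainEngine) => Promise<T>): Promise<T> {
  const cfg = loadConfig();
  if (!cfg) throw new Error('No gbrain config; run `gbrain init` first.');
  const engineConfig = toEngineConfig(cfg);
  const engine = await createEngine(engineConfig);
  await engine.connect(engineConfig);
  try {
    return await work(engine);
  } finally {
    await engine.disconnect(); // always release, even when work() throws
  }
}

// Usage mirroring phaseCVerify: count pages still missing effective_date.
const remaining = await withEngine(async (engine) => {
  const rows = await engine.executeRaw<{ count: string }>(
    `SELECT COUNT(*)::text AS count FROM pages WHERE effective_date IS NULL`,
  );
  return Number(rows[0]?.count ?? 0);
});
```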
23 changes: 9 additions & 14 deletions src/core/backfill-effective-date.ts
@@ -175,14 +175,10 @@ export async function backfillEffectiveDate(
if (!opts.dryRun) {
// Compute effective_date for each row, then UPDATE in a batch wrapped
// in its own transaction (so SET LOCAL statement_timeout scopes to it).
// postgres.js's `transaction` would be cleaner but we're using executeRaw
// for engine portability; explicit BEGIN/COMMIT does the same on both.
if (isPostgres) {
await engine.executeRaw(`BEGIN`);
await engine.executeRaw(`SET LOCAL statement_timeout = '600s'`);
}

try {
const applyBatch = async (txEngine: BrainEngine): Promise<void> => {
if (isPostgres) {
await txEngine.executeRaw(`SET LOCAL statement_timeout = '600s'`);
}
for (const r of rows) {
const fm = parseFrontmatter(r.frontmatter);
const filename = r.import_filename
@@ -211,13 +207,12 @@
touched++;
if (computed.source === 'fallback') fallback++;
}
};

if (isPostgres) await engine.executeRaw(`COMMIT`);
} catch (e) {
if (isPostgres) {
try { await engine.executeRaw(`ROLLBACK`); } catch { /* ignore */ }
}
throw e;
if (isPostgres) {
await engine.transaction(applyBatch);
} else {
await applyBatch(engine);
}
} else {
// Dry run: still count what WOULD change.
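The batch update now hands transaction management to `engine.transaction(applyBatch)` instead of issuing raw `BEGIN`/`COMMIT`/`ROLLBACK` through `executeRaw`. The diff doesn't show how `BrainEngine.transaction` is implemented; for a Postgres-backed engine it plausibly looks like the sketch below, where the callback runs on the same connection so `SET LOCAL statement_timeout` inside `applyBatch` stays scoped to the transaction (the method body and rollback handling are assumptions; only the call shape comes from this diff):

```ts
// Hypothetical sketch of a transaction() method on a Postgres-backed engine.
// The real PostgresEngine.transaction may differ; only the call shape
// engine.transaction(applyBatch) appears in the diff.
class EngineTransactionSketch {
  constructor(private executeRaw: (sql: string) => Promise<unknown>) {}

  async transaction<T>(work: (txEngine: EngineTransactionSketch) => Promise<T>): Promise<T> {
    await this.executeRaw(`BEGIN`);
    try {
      // Runs on the same connection, so SET LOCAL statements issued by
      // `work` are scoped to this transaction.
      const result = await work(this);
      await this.executeRaw(`COMMIT`);
      return result;
    } catch (e) {
      // Best-effort rollback; rethrow the original error.
      try { await this.executeRaw(`ROLLBACK`); } catch { /* ignore */ }
      throw e;
    }
  }
}
```

Functionally this is the same BEGIN/COMMIT/ROLLBACK dance the old code did inline; moving it behind the engine keeps `backfillEffectiveDate` free of manual rollback handling, while the non-Postgres path simply runs `applyBatch` without a transaction wrapper, as before.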
7 changes: 3 additions & 4 deletions src/core/pglite-engine.ts
@@ -2157,17 +2157,16 @@ export class PGLiteEngine implements BrainEngine {
const orphanPages = Number(r.orphan_pages);
const deadLinks = Number(r.dead_links);
const linkCount = Number(r.link_count);
const pagesWithTimeline = Number(r.pages_with_timeline);
const timelineCoverage = Number(r.timeline_coverage);

const linkDensity = pageCount > 0 ? Math.min(linkCount / pageCount, 1) : 0;
const timelineCoverageDensity = pageCount > 0 ? Math.min(pagesWithTimeline / pageCount, 1) : 0;
const noOrphans = pageCount > 0 ? 1 - (orphanPages / pageCount) : 1;
const noDeadLinks = pageCount > 0 ? 1 - Math.min(deadLinks / pageCount, 1) : 1;
// Bug 11 — per-component points. Sum equals brainScore by construction
// so `doctor` can render a breakdown that adds up to the total.
const embedCoverageScore = pageCount === 0 ? 0 : Math.round(embedCoverage * 35);
const linkDensityScore = pageCount === 0 ? 0 : Math.round(linkDensity * 25);
const timelineCoverageScore = pageCount === 0 ? 0 : Math.round(timelineCoverageDensity * 15);
const timelineCoverageScore = pageCount === 0 ? 0 : Math.round(timelineCoverage * 15);
const noOrphansScore = pageCount === 0 ? 0 : Math.round(noOrphans * 15);
const noDeadLinksScore = pageCount === 0 ? 0 : Math.round(noDeadLinks * 10);
const brainScore = embedCoverageScore + linkDensityScore + timelineCoverageScore + noOrphansScore + noDeadLinksScore;
@@ -2181,7 +2180,7 @@
brain_score: brainScore,
dead_links: deadLinks,
link_coverage: Number(r.link_coverage),
timeline_coverage: Number(r.timeline_coverage),
timeline_coverage: timelineCoverage,
most_connected: (connected as { slug: string; link_count: number }[]).map(c => ({
slug: c.slug,
link_count: Number(c.link_count),
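The scoring change itself is small: the 15-point timeline component now multiplies the SQL-computed `timeline_coverage` (per the new test, the share of timeline-eligible entity pages with at least one timeline entry) instead of recomputing a density over all pages. For reference, a standalone sketch of the whole breakdown, with weights taken from this diff and input field names assumed for illustration:

```ts
// Minimal sketch of the brain_score breakdown. The 35/25/15/15/10 weights
// come from the diff; the HealthInputs shape is assumed for illustration.
interface HealthInputs {
  pageCount: number;
  embedCoverage: number;     // 0..1 fraction of pages with embeddings
  linkCount: number;
  orphanPages: number;
  deadLinks: number;
  timelineCoverage: number;  // 0..1 SQL-computed entity timeline coverage
}

function brainScoreBreakdown(h: HealthInputs) {
  const n = h.pageCount;
  const linkDensity = n > 0 ? Math.min(h.linkCount / n, 1) : 0;
  const noOrphans = n > 0 ? 1 - h.orphanPages / n : 1;
  const noDeadLinks = n > 0 ? 1 - Math.min(h.deadLinks / n, 1) : 1;

  const embed = n === 0 ? 0 : Math.round(h.embedCoverage * 35);
  const links = n === 0 ? 0 : Math.round(linkDensity * 25);
  const timeline = n === 0 ? 0 : Math.round(h.timelineCoverage * 15);
  const orphans = n === 0 ? 0 : Math.round(noOrphans * 15);
  const dead = n === 0 ? 0 : Math.round(noDeadLinks * 10);

  // The components sum to the total by construction, so `doctor`
  // can print a breakdown that adds up to brain_score exactly.
  return { embed, links, timeline, orphans, dead, total: embed + links + timeline + orphans + dead };
}
```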
7 changes: 3 additions & 4 deletions src/core/postgres-engine.ts
@@ -2268,17 +2268,16 @@ export class PostgresEngine implements BrainEngine {
const orphanPages = Number(h.orphan_pages);
const deadLinks = Number(h.dead_links);
const linkCount = Number(h.link_count);
const pagesWithTimeline = Number(h.pages_with_timeline);
const timelineCoverage = Number(h.timeline_coverage);

// brain_score: 0-100 weighted average
const linkDensity = pageCount > 0 ? Math.min(linkCount / pageCount, 1) : 0;
const timelineCoverageWhole = pageCount > 0 ? Math.min(pagesWithTimeline / pageCount, 1) : 0;
const noOrphans = pageCount > 0 ? 1 - (orphanPages / pageCount) : 1;
const noDeadLinks = pageCount > 0 ? 1 - Math.min(deadLinks / pageCount, 1) : 1;
// Per-component points. Sum equals brainScore by construction.
const embedCoverageScore = pageCount === 0 ? 0 : Math.round(embedCoverage * 35);
const linkDensityScore = pageCount === 0 ? 0 : Math.round(linkDensity * 25);
const timelineCoverageScore = pageCount === 0 ? 0 : Math.round(timelineCoverageWhole * 15);
const timelineCoverageScore = pageCount === 0 ? 0 : Math.round(timelineCoverage * 15);
const noOrphansScore = pageCount === 0 ? 0 : Math.round(noOrphans * 15);
const noDeadLinksScore = pageCount === 0 ? 0 : Math.round(noDeadLinks * 10);
const brainScore = embedCoverageScore + linkDensityScore + timelineCoverageScore + noOrphansScore + noDeadLinksScore;
@@ -2292,7 +2291,7 @@
brain_score: brainScore,
dead_links: deadLinks,
link_coverage: Number(h.link_coverage),
timeline_coverage: Number(h.timeline_coverage),
timeline_coverage: timelineCoverage,
most_connected: (connected as unknown as { slug: string; link_count: number }[]).map(c => ({
slug: c.slug,
link_count: Number(c.link_count),
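The PostgresEngine change mirrors the PGLite one exactly. To see the practical difference, take the scenario from the new test below: 2 entity pages with timeline entries plus 20 plain notes. The old all-page density would award roughly 1 of the 15 timeline points, while the entity-level coverage awards all 15 (the old formula here is reconstructed from the removed lines):

```ts
// Worked check for the test scenario: 2 entities with timelines, 20 notes.
const oldDensityScore = Math.round(Math.min(2 / 22, 1) * 15); // ≈ 1 point under the old per-page density
const newCoverageScore = Math.round((2 / 2) * 15);            // 15 points with entity timeline coverage
```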
20 changes: 20 additions & 0 deletions test/brain-score-breakdown.test.ts
@@ -58,6 +58,26 @@ describe('Bug 11 — brain_score breakdown sums to total', () => {
expect(sum).toBe(h.brain_score);
});

test('timeline score follows entity timeline coverage, not all-page density', async () => {
await engine.putPage('entity-a', { type: 'person', title: 'Entity A', compiled_truth: 'person', frontmatter: {} });
await engine.putPage('entity-b', { type: 'company', title: 'Entity B', compiled_truth: 'company', frontmatter: {} });
for (let i = 0; i < 20; i += 1) {
await engine.putPage(`note-${i}`, { type: 'note', title: `Note ${i}`, compiled_truth: 'note', frontmatter: {} });
}

for (const slug of ['entity-a', 'entity-b']) {
const pageId = (await (engine as any).db.query(`SELECT id FROM pages WHERE slug=$1`, [slug])).rows[0].id;
await (engine as any).db.query(
`INSERT INTO timeline_entries (page_id, date, summary) VALUES ($1, $2, $3)`,
[pageId, '2026-05-09', 'timeline covered'],
);
}

const h = await engine.getHealth();
expect(h.timeline_coverage).toBe(1);
expect(h.timeline_coverage_score).toBe(15);
});

test('brain_score caps at 100', async () => {
const h = await engine.getHealth();
expect(h.brain_score).toBeGreaterThanOrEqual(0);