diff --git a/src/commands/import.ts b/src/commands/import.ts
index 58992f11a..482908f86 100644
--- a/src/commands/import.ts
+++ b/src/commands/import.ts
@@ -28,7 +28,7 @@ export interface RunImportResult {
   failures: Array<{ path: string; error: string }>;
 }
 
-export async function runImport(engine: BrainEngine, args: string[], opts: { commit?: string } = {}): Promise<RunImportResult> {
+export async function runImport(engine: BrainEngine, args: string[], opts: { commit?: string; exclude?: string[]; slugRoot?: string } = {}): Promise<RunImportResult> {
   const noEmbed = args.includes('--no-embed');
   const fresh = args.includes('--fresh');
   const jsonOutput = args.includes('--json');
@@ -58,19 +58,28 @@ export async function runImport(engine: BrainEngine, args: string[], opts: { com
 
   // Collect all .md files
   const allFiles = collectMarkdownFiles(dir);
-  console.log(`Found ${allFiles.length} markdown files`);
+
+  // Apply exclude patterns from opts (passed by performFullSync for --exclude support)
+  let filteredFiles = allFiles;
+  if (opts.exclude && opts.exclude.length > 0) {
+    const { matchesAnyGlob } = await import('../core/sync.ts');
+    filteredFiles = allFiles.filter(abs => !matchesAnyGlob(relative(dir, abs), opts.exclude!));
+    console.log(`Found ${filteredFiles.length} markdown files (${allFiles.length - filteredFiles.length} excluded by --exclude patterns)`);
+  } else {
+    console.log(`Found ${allFiles.length} markdown files`);
+  }
 
   // Resume from checkpoint if available
   const checkpointPath = gbrainPath('import-checkpoint.json');
-  let files = allFiles;
+  let files = filteredFiles;
   let resumeIndex = 0;
   if (!fresh && existsSync(checkpointPath)) {
     try {
       const cp = JSON.parse(readFileSync(checkpointPath, 'utf-8'));
-      if (cp.dir === dir && cp.totalFiles === allFiles.length) {
+      if (cp.dir === dir && cp.totalFiles === filteredFiles.length) {
         resumeIndex = cp.processedIndex;
-        files = allFiles.slice(resumeIndex);
+        files = filteredFiles.slice(resumeIndex);
         console.log(`Resuming from checkpoint: skipping ${resumeIndex} already-processed files`);
       }
     } catch {
@@ -102,8 +111,10 @@ export async function runImport(engine: BrainEngine, args: string[], opts: { com
     progress.tick(1, `imported=${imported} skipped=${skipped} errors=${errors}`);
   }
 
+  const slugBase = opts.slugRoot ?? dir;
+
   async function processFile(eng: BrainEngine, filePath: string) {
-    const relativePath = relative(dir, filePath);
+    const relativePath = relative(slugBase, filePath);
     try {
       // v0.27.1 (F2): dispatch image extensions to importImageFile when
       // multimodal is enabled. The walker (collectMarkdownFiles) only picks
diff --git a/src/commands/sync.ts b/src/commands/sync.ts
index 8d76e82fc..d79bed028 100644
--- a/src/commands/sync.ts
+++ b/src/commands/sync.ts
@@ -1,4 +1,4 @@
-import { existsSync, readFileSync, writeFileSync, statSync, readdirSync } from 'fs';
+import { existsSync, readFileSync, writeFileSync, statSync, readdirSync, realpathSync } from 'fs';
 import { execFileSync } from 'child_process';
 import { join, relative } from 'path';
 import type { BrainEngine } from '../core/engine.ts';
@@ -187,6 +187,18 @@ export interface SyncOpts {
    * v0.22.13 (PR #490 CODEX-2). Not part of the public CLI surface.
    */
   skipLock?: boolean;
+  /**
+   * Sync only files under this subdirectory of the git repo. When set,
+   * the git context root is still discovered from the nearest `.git/`
+   * ancestor, but file walking and import scope are limited to this subpath.
+   * Enables N logical sources in a single git repo (monorepo pattern).
+   */
+  srcSubpath?: string;
+  /**
+   * Glob patterns for files to exclude from sync (repeatable via CLI).
+   * Passed through to `isSyncable`'s `opts.exclude` field.
+   */
+  exclude?: string[];
 }
 
 function git(repoPath: string, ...args: string[]): string {
@@ -196,6 +208,40 @@ function git(repoPath: string, ...args: string[]): string {
   }).trim();
 }
 
+/**
+ * Walk up from inputPath to find the nearest git repo root via
+ * `git -C <path> rev-parse --show-toplevel`. 
Handles worktrees and + * submodules natively (git itself resolves them). Throws a user-friendly + * error when no git repo is found. + */ +function discoverGitRoot(inputPath: string): string { + try { + return execFileSync('git', ['-C', inputPath, 'rev-parse', '--show-toplevel'], { + encoding: 'utf-8', + timeout: 10000, + }).trim(); + } catch { + throw new Error( + `Not inside a git repository: ${inputPath}. GBrain sync requires a git-initialized repo (or a subdirectory of one).`, + ); + } +} + +/** + * Returns true only if filePath resolves (via realpathSync) to a path + * inside gitRoot. Guards against symlink-escape TOCTOU: the check happens + * at the file level, not just at scope entry. + */ +function isPathSafe(filePath: string, gitRoot: string): boolean { + try { + const real = realpathSync(filePath); + const rootReal = realpathSync(gitRoot); + return real.startsWith(rootReal + '/') || real === rootReal; + } catch { + return false; + } +} + // v0.18.0 Step 5: source-scoped sync state helpers. When opts.sourceId // is set, read/write the per-source row instead of the global config // keys. These wrappers centralize the branch so every read/write site @@ -370,10 +416,26 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise rev-parse --show-toplevel`, which also + // handles worktrees and submodules natively. + const gitContextRoot = realpathSync(discoverGitRoot(repoPath)); + const rawScopeRoot = opts.srcSubpath ? join(repoPath, opts.srcSubpath) : repoPath; + if (!existsSync(rawScopeRoot)) { + throw new Error(`Sync scope does not exist: ${rawScopeRoot}`); + } + const syncScopeRoot = realpathSync(rawScopeRoot); + // NAV-1 scope-entry: verify syncScopeRoot is inside gitContextRoot. + // Catches `--src-subpath ../../../etc` path traversal before any git op runs. 
+ if (!syncScopeRoot.startsWith(gitContextRoot + '/') && syncScopeRoot !== gitContextRoot) { + throw new Error( + `Sync scope ${syncScopeRoot} resolves outside git repo ${gitContextRoot}. ` + + `Refusing to sync: possible path traversal via --src-subpath.`, + ); } + // Relative path from git root to sync scope (empty string when no subpath). + const syncScopeRelPath = relative(gitContextRoot, syncScopeRoot); // Git pull (unless --no-pull). v0.28.1 codex finding (HIGH): the legacy // git() helper at sync.ts:192 spawns git without GIT_SSRF_FLAGS, so @@ -381,10 +443,11 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise { + if (!syncScopeRelPath) return true; + return p === syncScopeRelPath || p.startsWith(syncScopeRelPath + '/'); + }; + + // NAV-4: warn if --exclude patterns would filter everything out. + if (opts.exclude && opts.exclude.length > 0) { + const inScopeAdded = manifest.added.filter(inScope); + const inScopeModified = manifest.modified.filter(inScope); + if (inScopeAdded.length > 0 || inScopeModified.length > 0) { + const allExcluded = + inScopeAdded.every(p => !isSyncable(p, { exclude: opts.exclude })) && + inScopeModified.every(p => !isSyncable(p, { exclude: opts.exclude })); + if (allExcluded) { + console.warn( + `[gbrain sync] No files matched after applying ${opts.exclude.length} --exclude pattern(s). ` + + `Check your --exclude flags. Patterns: ${JSON.stringify(opts.exclude)}`, + ); + } + } + } + + // Filter to syncable files (strategy-aware + scope-aware + exclude-aware) + const syncOpts = opts.strategy || opts.exclude + ? 
{ strategy: opts.strategy, exclude: opts.exclude } + : undefined; const filtered: SyncManifest = { - added: manifest.added.filter(p => isSyncable(p, syncOpts)), - modified: manifest.modified.filter(p => isSyncable(p, syncOpts)), - deleted: manifest.deleted.filter(p => isSyncable(p, syncOpts)), - renamed: manifest.renamed.filter(r => isSyncable(r.to, syncOpts)), + added: manifest.added.filter(p => inScope(p) && isSyncable(p, syncOpts)), + modified: manifest.modified.filter(p => inScope(p) && isSyncable(p, syncOpts)), + deleted: manifest.deleted.filter(p => inScope(p) && isSyncable(p, syncOpts)), + renamed: manifest.renamed.filter(r => inScope(r.to) && isSyncable(r.to, syncOpts)), }; // Delete pages that became un-syncable (modified but filtered out). @@ -482,7 +573,7 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise !isSyncable(p, syncOpts)); + const unsyncableModified = manifest.modified.filter(p => inScope(p) && !isSyncable(p, syncOpts)); for (const path of unsyncableModified) { const slug = resolveSlugForPath(path); try { @@ -575,9 +666,10 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise { - const filePath = join(syncRepoPath, path); + const filePath = join(gitContextRoot, path); + // NAV-1 TOCTOU: re-validate each file's realpath during the walk to + // guard against symlink-escape that slips past the scope-entry check. 
+ if (!isPathSafe(filePath, gitContextRoot)) { + failedFiles.push({ path, error: 'path resolves outside git repo (symlink escape)' }); + progress.tick(1, `skip:${path}`); + return; + } if (!existsSync(filePath)) { // CODEX-3 (v0.22.13): a file the diff said exists at headCommit but // is gone from disk means the working tree has drifted (someone ran @@ -724,7 +822,7 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise', @@ -760,7 +858,7 @@ async function performSyncInner(engine: BrainEngine, opts: SyncOpts): Promise { - // Dry-run: walk the repo, count syncable files, return without writing. + const syncOpts = opts.strategy || opts.exclude + ? { strategy: opts.strategy, exclude: opts.exclude } + : undefined; + + // Dry-run: walk the scope, count syncable files, return without writing. // Fixes the silent-write-on-dry-run bug where performFullSync called // runImport unconditionally regardless of opts.dryRun. if (opts.dryRun) { const { collectMarkdownFiles } = await import('./import.ts'); - const allFiles = collectMarkdownFiles(repoPath); + const allFiles = collectMarkdownFiles(syncScopeRoot); const syncableRelPaths = allFiles - .map(abs => relative(repoPath, abs)) - .filter(rel => isSyncable(rel)); + .map(abs => relative(syncScopeRoot, abs)) + .filter(rel => isSyncable(rel, syncOpts)); console.log( `Full-sync dry run: ${syncableRelPaths.length} file(s) would be imported ` + - `from ${repoPath} @ ${headCommit.slice(0, 8)}.`, + `from ${syncScopeRoot} @ ${headCommit.slice(0, 8)}.`, ); return { status: 'dry_run', @@ -877,6 +980,21 @@ async function performFullSync( }; } + // NAV-4 full-sync variant: warn if --exclude would exclude everything. 
+ if (opts.exclude && opts.exclude.length > 0) { + const { collectMarkdownFiles } = await import('./import.ts'); + const allFiles = collectMarkdownFiles(syncScopeRoot); + if (allFiles.length > 0) { + const anyPasses = allFiles.some(abs => isSyncable(relative(syncScopeRoot, abs), syncOpts)); + if (!anyPasses) { + console.warn( + `[gbrain sync] No files matched after applying ${opts.exclude.length} --exclude pattern(s). ` + + `Check your --exclude flags. Patterns: ${JSON.stringify(opts.exclude)}`, + ); + } + } + } + // v0.22.13 (PR #490 A1 + Q5): full sync is always "large" by definition // (entire working tree). Auto-concurrency fires unconditionally for Postgres; // PGLite stays serial because its engine is single-connection. Routes the @@ -884,12 +1002,20 @@ async function performFullSync( // sync and the jobs handler. const FULL_SYNC_LARGE_MARKER = Number.MAX_SAFE_INTEGER; const fullConcurrency = autoConcurrency(engine, FULL_SYNC_LARGE_MARKER, opts.concurrency); - console.log(`Running full import of ${repoPath}${fullConcurrency > 1 ? ` (${fullConcurrency} workers)` : ''}...`); + console.log(`Running full import of ${syncScopeRoot}${fullConcurrency > 1 ? ` (${fullConcurrency} workers)` : ''}...`); const { runImport } = await import('./import.ts'); - const importArgs = [repoPath]; + const importArgs = [syncScopeRoot]; if (opts.noEmbed) importArgs.push('--no-embed'); if (fullConcurrency > 1) importArgs.push('--workers', String(fullConcurrency)); - const result = await runImport(engine, importArgs, { commit: headCommit }); + // Use git-root-relative slugs ONLY when --src-subpath was explicitly + // provided. Sources registered with a direct subdirectory path (e.g. + // wiki → ~/atlas/shared/wiki) expect slugs relative to that path, not + // the git root — they may carry frontmatter slug: fields matching the + // old path-relative convention. The --src-subpath monorepo case is the + // only place git-root-relative slugs are intentional. 
+ const scopeRel = relative(gitContextRoot, syncScopeRoot); + const slugRoot = (opts.srcSubpath && scopeRel) ? gitContextRoot : undefined; + const result = await runImport(engine, importArgs, { commit: headCommit, exclude: opts.exclude, slugRoot }); // Bug 9 — gate the full-sync bookmark on success. runImport already // writes its own sync.last_commit conditionally (import.ts), but @@ -905,7 +1031,7 @@ async function performFullSync( `Fix the YAML in those files and re-run, or use '--skip-failed'.`, ); await engine.setConfig('sync.last_run', new Date().toISOString()); - await writeSyncAnchor(engine, opts.sourceId, 'repo_path', repoPath); + await writeSyncAnchor(engine, opts.sourceId, 'repo_path', syncScopeRoot); return { status: 'blocked_by_failures', fromCommit: null, @@ -931,7 +1057,7 @@ async function performFullSync( // to the right sources row rather than the global config. await writeSyncAnchor(engine, opts.sourceId, 'last_commit', headCommit); await engine.setConfig('sync.last_run', new Date().toISOString()); - await writeSyncAnchor(engine, opts.sourceId, 'repo_path', repoPath); + await writeSyncAnchor(engine, opts.sourceId, 'repo_path', syncScopeRoot); // v0.20.0 Cathedral II Layer 12: persist chunker version for the gate. 
await writeChunkerVersion(engine, opts.sourceId, String(CHUNKER_VERSION)); @@ -976,6 +1102,13 @@ export async function runSync(engine: BrainEngine, args: string[]) { const jsonOut = args.includes('--json'); const yesFlag = args.includes('--yes'); const strategyArg = args.find((a, i) => args[i - 1] === '--strategy') as SyncOpts['strategy'] | undefined; + const srcSubpath = args.find((a, i) => args[i - 1] === '--src-subpath') || undefined; + const exclude: string[] = []; + for (let i = 0; i < args.length; i++) { + if (args[i] === '--exclude' && i + 1 < args.length) { + exclude.push(args[i + 1]); + } + } const concurrencyStr = args.find((a, i) => args[i - 1] === '--concurrency' || args[i - 1] === '--workers'); // v0.22.13 (PR #490 Q2): parseWorkers throws on '0', '-3', 'foo', '1.5' instead // of silently falling through to auto-concurrency or NaN. Loud failure beats @@ -1089,7 +1222,9 @@ export async function runSync(engine: BrainEngine, args: string[]) { // the advertised db_only ignore rules unless they sync each repo // individually. if (result.status !== 'dry_run' && result.status !== 'blocked_by_failures') { - manageGitignore(src.local_path!, engine.kind); + let gitRootForIgnore = src.local_path!; + try { gitRootForIgnore = discoverGitRoot(src.local_path!); } catch { /* best-effort */ } + manageGitignore(gitRootForIgnore, engine.kind); } } catch (e: unknown) { console.error(`Error syncing ${src.name}: ${e instanceof Error ? e.message : String(e)}`); @@ -1098,7 +1233,12 @@ export async function runSync(engine: BrainEngine, args: string[]) { return; } - const opts: SyncOpts = { repoPath, dryRun, full, noPull, noEmbed, skipFailed, retryFailed, sourceId, strategy: strategyArg, concurrency }; + const opts: SyncOpts = { + repoPath, dryRun, full, noPull, noEmbed, skipFailed, retryFailed, sourceId, + strategy: strategyArg, concurrency, + srcSubpath: srcSubpath || undefined, + exclude: exclude.length > 0 ? 
exclude : undefined, + }; // Bug 9 — --retry-failed: before running normal sync, clear acknowledgment // flags so the sync picks them up as fresh work. The actual re-attempt @@ -1126,7 +1266,10 @@ export async function runSync(engine: BrainEngine, args: string[]) { if (result.status !== 'dry_run' && result.status !== 'blocked_by_failures') { const effectiveRepoPath = opts.repoPath ?? (await getDefaultSourcePath(engine)); if (effectiveRepoPath) { - manageGitignore(effectiveRepoPath, engine.kind); + // .gitignore must be managed at the git root, not a subdir. + let gitRootForIgnore = effectiveRepoPath; + try { gitRootForIgnore = discoverGitRoot(effectiveRepoPath); } catch { /* best-effort */ } + manageGitignore(gitRootForIgnore, engine.kind); } } return; diff --git a/src/core/sync.ts b/src/core/sync.ts index 60a9fca80..e05f51a36 100644 --- a/src/core/sync.ts +++ b/src/core/sync.ts @@ -200,7 +200,7 @@ function globToRegex(pattern: string): RegExp { return new RegExp(regex); } -function matchesAnyGlob(path: string, patterns?: string[]): boolean { +export function matchesAnyGlob(path: string, patterns?: string[]): boolean { if (!patterns || patterns.length === 0) return false; const normalized = path.replace(/\\/g, '/'); return patterns.some((pattern) => globToRegex(pattern).test(normalized)); diff --git a/test/sync-monorepo.test.ts b/test/sync-monorepo.test.ts new file mode 100644 index 000000000..492faf66e --- /dev/null +++ b/test/sync-monorepo.test.ts @@ -0,0 +1,281 @@ +/** + * Tests for --src-subpath and --exclude monorepo support (PR-A+B). + * + * Regression check: run this file against upstream master BEFORE this PR's + * changes; every test should fail with "Not a git repository" or similar. + * After applying the PR changes, all tests should pass. 
+ */ + +import { describe, test, expect, beforeAll, afterAll, beforeEach, afterEach } from 'bun:test'; +import { mkdtempSync, writeFileSync, rmSync, mkdirSync, symlinkSync } from 'fs'; +import { join } from 'path'; +import { execSync } from 'child_process'; +import { tmpdir } from 'os'; +import { PGLiteEngine } from '../src/core/pglite-engine.ts'; +import { resetPgliteState } from './helpers/reset-pglite.ts'; + +// Helper: create a minimal valid markdown file +function mdPage(title: string, body = 'Content.'): string { + return `---\ntype: note\ntitle: ${title}\n---\n\n${body}`; +} + +// Helper: init a git repo with author identity +function gitInit(dir: string): void { + execSync('git init', { cwd: dir, stdio: 'pipe' }); + execSync('git config user.email "test@test.com"', { cwd: dir, stdio: 'pipe' }); + execSync('git config user.name "Test"', { cwd: dir, stdio: 'pipe' }); +} + +// Helper: stage + commit everything in a git repo +function gitCommit(dir: string, msg = 'initial'): void { + execSync('git add -A', { cwd: dir, stdio: 'pipe' }); + execSync(`git commit -m "${msg}"`, { cwd: dir, stdio: 'pipe' }); +} + +describe('sync monorepo subdir-source support (PR-A+B)', () => { + let engine: PGLiteEngine; + let repoPath: string; + + beforeAll(async () => { + engine = new PGLiteEngine(); + await engine.connect({}); + await engine.initSchema(); + }); + + afterAll(async () => { + await engine.disconnect(); + }); + + beforeEach(async () => { + await resetPgliteState(engine); + repoPath = mkdtempSync(join(tmpdir(), 'gbrain-monorepo-')); + gitInit(repoPath); + mkdirSync(join(repoPath, 'wiki'), { recursive: true }); + mkdirSync(join(repoPath, 'memory'), { recursive: true }); + writeFileSync(join(repoPath, 'wiki', 'page1.md'), mdPage('Wiki Page 1')); + writeFileSync(join(repoPath, 'wiki', 'page2.md'), mdPage('Wiki Page 2')); + writeFileSync(join(repoPath, 'memory', 'note1.md'), mdPage('Memory Note 1')); + writeFileSync(join(repoPath, 'memory', 'note2.md'), mdPage('Memory Note 
2')); + gitCommit(repoPath); + }); + + afterEach(() => { + if (repoPath) rmSync(repoPath, { recursive: true, force: true }); + }); + + // ───────────────────────────────────────────────────────────────────────── + // Back-compat: sync at git root (no srcSubpath) still works + // ───────────────────────────────────────────────────────────────────────── + + test('back-compat: sync at git root without srcSubpath imports all files', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + const result = await performSync(engine, { + repoPath, + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(4); // wiki/page1 + wiki/page2 + memory/note1 + memory/note2 + }); + + // ───────────────────────────────────────────────────────────────────────── + // Auto-discovery: repoPath IS a non-git-root subdir + // ───────────────────────────────────────────────────────────────────────── + + test('auto-discovery: repoPath is a git subdir — discoverGitRoot succeeds', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + // Pass the wiki/ subdir directly as repoPath (no explicit srcSubpath). + // Before this PR: throws "Not a git repository". + // After this PR: discovers gitContextRoot = repoPath, syncScopeRoot = wiki/. 
+ const result = await performSync(engine, { + repoPath: join(repoPath, 'wiki'), + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(2); // only wiki/page1 + wiki/page2 + }); + + // ───────────────────────────────────────────────────────────────────────── + // srcSubpath explicit flag: scope to subdir from git root + // ───────────────────────────────────────────────────────────────────────── + + test('--src-subpath wiki: only wiki/ files are imported', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + const result = await performSync(engine, { + repoPath, + srcSubpath: 'wiki', + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(2); + // Verify the imported slugs are from wiki/ only + const wikiPage = await engine.getPage('wiki/page1'); + expect(wikiPage).not.toBeNull(); + const memoryPage = await engine.getPage('memory/note1'); + expect(memoryPage).toBeNull(); + }); + + test('--src-subpath memory: only memory/ files are imported', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + const result = await performSync(engine, { + repoPath, + srcSubpath: 'memory', + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(2); + const memoryPage = await engine.getPage('memory/note1'); + expect(memoryPage).not.toBeNull(); + const wikiPage = await engine.getPage('wiki/page1'); + expect(wikiPage).toBeNull(); + }); + + // ───────────────────────────────────────────────────────────────────────── + // Two sources in one repo, scoped independently + // ───────────────────────────────────────────────────────────────────────── + + test('2 sources in 1 repo: sync each scope independently, no cross-contamination', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + + const 
wikiResult = await performSync(engine, { + repoPath, + srcSubpath: 'wiki', + noPull: true, + noEmbed: true, + full: true, + }); + expect(wikiResult.status).toBe('first_sync'); + expect(wikiResult.added).toBe(2); + + // Reset only page state, keep the engine connected for second sync + await resetPgliteState(engine); + + const memResult = await performSync(engine, { + repoPath, + srcSubpath: 'memory', + noPull: true, + noEmbed: true, + full: true, + }); + expect(memResult.status).toBe('first_sync'); + expect(memResult.added).toBe(2); + + // After memory sync, memory pages exist and wiki pages don't + expect(await engine.getPage('memory/note1')).not.toBeNull(); + expect(await engine.getPage('wiki/page1')).toBeNull(); + }); + + // ───────────────────────────────────────────────────────────────────────── + // Path-traversal sanitization (NAV-1 + NAV-2) + // ───────────────────────────────────────────────────────────────────────── + + test('path-traversal: --src-subpath ../escape is rejected before any git op', async () => { + const outsideDir = mkdtempSync(join(tmpdir(), 'gbrain-escape-')); + try { + // Create the escape target dir + mkdirSync(outsideDir, { recursive: true }); + const { performSync } = await import('../src/commands/sync.ts'); + await expect( + performSync(engine, { + repoPath, + srcSubpath: '../' + outsideDir.split('/').pop(), + noPull: true, + noEmbed: true, + full: true, + }), + ).rejects.toThrow(/outside git repo|does not exist/i); + } finally { + rmSync(outsideDir, { recursive: true, force: true }); + } + }); + + test('path-traversal: symlink subdir pointing outside repo is rejected (NAV-1 TOCTOU)', async () => { + const outsideDir = mkdtempSync(join(tmpdir(), 'gbrain-sym-target-')); + writeFileSync(join(outsideDir, 'secret.md'), mdPage('Secret')); + const symlinkPath = join(repoPath, 'symlink-escape'); + try { + symlinkSync(outsideDir, symlinkPath); + const { performSync } = await import('../src/commands/sync.ts'); + await expect( + 
performSync(engine, { + repoPath, + srcSubpath: 'symlink-escape', + noPull: true, + noEmbed: true, + full: true, + }), + ).rejects.toThrow(/outside git repo/i); + } finally { + rmSync(outsideDir, { recursive: true, force: true }); + } + }); + + // ───────────────────────────────────────────────────────────────────────── + // --exclude: repeatable glob pattern flag + // ───────────────────────────────────────────────────────────────────────── + + test('--exclude: single pattern excludes matching files from full sync', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + // Sync wiki/ but exclude page2.md + const result = await performSync(engine, { + repoPath, + srcSubpath: 'wiki', + exclude: ['page2.md'], + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(1); // only page1 (page2 excluded) + }); + + test('--exclude: glob pattern with wildcard', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + // Exclude all files matching *2.md + const result = await performSync(engine, { + repoPath, + srcSubpath: 'wiki', + exclude: ['*2.md'], + noPull: true, + noEmbed: true, + full: true, + }); + expect(result.status).toBe('first_sync'); + expect(result.added).toBe(1); // only page1 (page2 excluded by *2.md) + }); + + // ───────────────────────────────────────────────────────────────────────── + // --exclude '**/*' emits warning (NAV-4) + // ───────────────────────────────────────────────────────────────────────── + + test('--exclude **/* emits warning when all files are excluded (NAV-4)', async () => { + const { performSync } = await import('../src/commands/sync.ts'); + const warnMessages: string[] = []; + const origWarn = console.warn; + console.warn = (...args: unknown[]) => { + warnMessages.push(args.join(' ')); + origWarn(...args); + }; + try { + await performSync(engine, { + repoPath, + srcSubpath: 'wiki', + exclude: ['**/*'], + noPull: 
true, + noEmbed: true, + full: true, + }); + } finally { + console.warn = origWarn; + } + const hasExcludeWarn = warnMessages.some(m => m.includes('--exclude') || m.includes('No files matched')); + expect(hasExcludeWarn).toBe(true); + }); +});