diff --git a/.github/workflows/deploy-cloudflare.yml b/.github/workflows/deploy-cloudflare.yml new file mode 100644 index 0000000..6b66a12 --- /dev/null +++ b/.github/workflows/deploy-cloudflare.yml @@ -0,0 +1,36 @@ +name: Deploy to Cloudflare Workers + +on: + workflow_dispatch: + inputs: + environment: + description: "Deployment environment" + required: false + default: "production" + type: choice + options: + - production + - preview + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: "18" + cache: "npm" + + - name: Install dependencies + run: npm install + + - name: Build frontend + run: npm run build + + - name: Deploy to Cloudflare Workers + working-directory: apps/proof-cloudflare + run: npx wrangler deploy + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} diff --git a/.gitignore b/.gitignore index 5837181..e416181 100644 --- a/.gitignore +++ b/.gitignore @@ -44,6 +44,9 @@ npm-debug.log* # Scratch .tmp/ .worktrees/ + +# Wrangler (Cloudflare dev artifacts) +.wrangler/ build-release/ build-release-dist/ diff --git a/AGENT_CONTRACT.md b/AGENT_CONTRACT.md index bc26f90..e5a25dc 100644 --- a/AGENT_CONTRACT.md +++ b/AGENT_CONTRACT.md @@ -1,6 +1,6 @@ # Agent Contract: Direct Markdown Sharing -This contract defines the public Proof SDK flow for creating and operating on shared documents over HTTP. +This contract defines the public Proof SDK flow for creating and operating on shared documents over HTTP. The routes and semantics apply to all deployment targets (Express server, Cloudflare Workers, etc.) — replace `localhost:4000` with your deployment URL. ## Endpoints @@ -152,3 +152,5 @@ curl -X POST http://localhost:4000/documents \ -H "Content-Type: application/json" \ -d '{"markdown":"# Plan\n\nShip the rewrite.","title":"Rewrite Plan","role":"commenter"}' ``` + +See `docs/DEPLOYMENT.md` for deployment options and configuration. 
diff --git a/README.md b/README.md index 1f48f66..0f973d4 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,7 @@ If you want the hosted product, use [Proof](https://proofeditor.ai). - `packages/doc-store-sqlite` - `packages/agent-bridge` - `apps/proof-example` +- `apps/proof-cloudflare` - `server` - `src` @@ -89,8 +90,10 @@ npm test - `AGENT_CONTRACT.md` - `docs/agent-docs.md` +- `docs/DEPLOYMENT.md` - `docs/proof.SKILL.md` - `docs/adr/2026-03-proof-sdk-public-core.md` +- `docs/adr/2026-03-cloudflare-workers-deployment.md` ## License diff --git a/apps/proof-cloudflare/README.md b/apps/proof-cloudflare/README.md new file mode 100644 index 0000000..48673ae --- /dev/null +++ b/apps/proof-cloudflare/README.md @@ -0,0 +1,135 @@ +# Proof Cloudflare + +Cloudflare Workers + Durable Objects deployment target for the Proof SDK. Same editor, same agent bridge, different runtime. + +Each document gets its own Durable Object instance with embedded SQLite, Yjs collab, and WebSocket handling. D1 stores the cross-document catalog (slug-to-DO mapping). + +## Quick Start + +Prerequisites: Node.js 18+, Cloudflare account with Workers Paid ($5/mo), Wrangler CLI. 
+ +```bash +# From repo root +npm install && npm run build + +# Create D1 database +wrangler d1 create proof-catalog +# Update database_id in wrangler.jsonc with the returned ID + +# Run migration +cd apps/proof-cloudflare +wrangler d1 migrations apply proof-catalog --remote + +# Deploy +npx wrangler deploy +``` + +## Local Development + +```bash +npm run build # build frontend assets (from repo root) +cd apps/proof-cloudflare +npm run dev # Miniflare with D1 + DO simulation +``` + +## Architecture + +``` +Worker (index.ts) +├── GET / → create doc, redirect to /d/:slug +├── GET /d/:slug → serve SPA (rewrite asset paths) +├── POST /documents → API doc creation +├── POST /share/markdown → create from raw markdown +├── GET /health → 200 OK +├── GET /.well-known/agent.json → agent discovery +├── /api/agent/:slug/* → route to Durable Object +├── /documents/:slug/* → route to Durable Object +└── /ws/:slug → route to Durable Object + +DocumentSession (Durable Object) +├── Yjs Y.Doc with SQLite persistence + compaction +├── Hocuspocus WebSocket protocol (auth, sync, awareness) +├── Agent bridge HTTP routes (see table below) +└── DODocumentStorage (events, idempotency, access control) +``` + +## Route Parity with Express + +The CF Worker implements the full agent bridge contract. Document lifecycle routes are also supported. Some Express-specific routes (collab management, legacy API paths) are not applicable to the DO architecture. 
+ +### Agent Bridge (full parity) + +| Route | Method | Express | CF Worker | +|-------|--------|---------|-----------| +| `/api/agent/:slug/state` | GET | Y | Y | +| `/api/agent/:slug/snapshot` | GET | Y | Y | +| `/api/agent/:slug/edit` | POST | Y | Y | +| `/api/agent/:slug/edit/v2` | POST | Y | Y | +| `/api/agent/:slug/rewrite` | POST | Y | Y | +| `/api/agent/:slug/ops` | POST | Y | Y | +| `/api/agent/:slug/marks/comment` | POST | Y | Y | +| `/api/agent/:slug/marks/suggest-replace` | POST | Y | Y | +| `/api/agent/:slug/marks/suggest-insert` | POST | Y | Y | +| `/api/agent/:slug/marks/suggest-delete` | POST | Y | Y | +| `/api/agent/:slug/marks/accept` | POST | Y | Y | +| `/api/agent/:slug/marks/reject` | POST | Y | Y | +| `/api/agent/:slug/marks/reply` | POST | Y | Y | +| `/api/agent/:slug/marks/resolve` | POST | Y | Y | +| `/api/agent/:slug/marks/unresolve` | POST | Y | Y | +| `/api/agent/:slug/presence` | POST | Y | Y | +| `/api/agent/:slug/presence/disconnect` | POST | Y | Y | +| `/api/agent/:slug/events/pending` | GET | Y | Y | +| `/api/agent/:slug/events/ack` | POST | Y | Y | +| `/api/agent/:slug/repair` | POST | Y | Y | +| `/api/agent/:slug/clone-from-canonical` | POST | Y | Y | + +### Document Routes + +| Route | Method | Express | CF Worker | Notes | +|-------|--------|---------|-----------|-------| +| `POST /documents` | POST | Y | Y | | +| `POST /share/markdown` | POST | Y | Y | | +| `/documents/:slug/state` | GET | Y | Y | via DO | +| `/documents/:slug/snapshot` | GET | Y | Y | via DO | +| `/documents/:slug/content` | GET | Y | Y | via DO | +| `/documents/:slug/open-context` | GET | Y | Y | via DO | +| `/documents/:slug/collab-session` | GET | Y | Y | via DO | +| `/documents/:slug/collab-refresh` | POST | Y | Y | via DO | +| `/documents/:slug/events/pending` | GET | Y | Y | via DO | +| `/documents/:slug/events/ack` | POST | Y | Y | via DO | +| `/documents/:slug/pause` | POST | Y | Y | via DO | +| `/documents/:slug/resume` | POST | Y | Y | via DO | +| 
`/documents/:slug/revoke` | POST | Y | Y | via DO | +| `/documents/:slug/delete` | POST | Y | Y | via DO | +| `/documents/:slug/title` | PUT | Y | Y | via DO | + +### Express-Only Routes (not in CF Worker) + +| Route | Reason | +|-------|--------| +| `/api/capabilities` | Express middleware concern | +| `/d/:slug/bridge/*` | Neutral bridge mount — agents use `/api/agent/` or `/documents/` | +| `/api/documents` (legacy) | Legacy create route — use `POST /documents` | +| Collab management endpoints | DO handles collab internally; no external collab server to manage | + +## Files + +| File | Purpose | +|------|---------| +| `src/index.ts` | Worker entrypoint — routing, doc creation, SPA serving | +| `src/document-session.ts` | Durable Object — Yjs sync, agent routes, marks, edits | +| `src/document-engine.ts` | Marks CRUD (comment, suggest, accept/reject, reply, resolve) | +| `src/canonical-projection.ts` | Y.Doc <-> markdown via ProseMirror | +| `src/milkdown-headless.ts` | Headless Milkdown engine for Workers runtime | +| `src/storage-do.ts` | SQLite-backed events, idempotency, access control | +| `src/document-ops.ts` | Operation parsing and authorization for `ops` endpoint | +| `src/agent-edit-ops.ts` | Text-level edit operations (append, replace, insert) | +| `src/auth.ts` | Token resolution and role-based access | +| `src/idempotency.ts` | Mutation replay detection | +| `src/proof-span-strip.ts` | Proof span tag stripping for agent-facing markdown | + +## See Also + +- `docs/DEPLOYMENT.md` — full deployment guide (Express and Workers) +- `docs/adr/2026-03-cloudflare-workers-deployment.md` — decision record +- `AGENT_CONTRACT.md` — agent HTTP protocol diff --git a/apps/proof-cloudflare/d1/migrations/0001_catalog.sql b/apps/proof-cloudflare/d1/migrations/0001_catalog.sql new file mode 100644 index 0000000..e64a723 --- /dev/null +++ b/apps/proof-cloudflare/d1/migrations/0001_catalog.sql @@ -0,0 +1,10 @@ +CREATE TABLE documents ( + id TEXT PRIMARY KEY, + slug TEXT 
UNIQUE NOT NULL, + title TEXT NOT NULL DEFAULT '', + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + do_id TEXT NOT NULL +); + +CREATE INDEX idx_documents_slug ON documents(slug); diff --git a/apps/proof-cloudflare/package.json b/apps/proof-cloudflare/package.json new file mode 100644 index 0000000..85964ef --- /dev/null +++ b/apps/proof-cloudflare/package.json @@ -0,0 +1,32 @@ +{ + "name": "proof-cloudflare", + "version": "0.1.0", + "type": "module", + "private": true, + "scripts": { + "dev": "wrangler dev", + "deploy": "wrangler deploy" + }, + "dependencies": { + "@milkdown/core": "^7.5.0", + "@milkdown/kit": "^7.5.0", + "@milkdown/preset-commonmark": "^7.5.0", + "@milkdown/preset-gfm": "^7.5.0", + "@milkdown/prose": "^7.5.0", + "@milkdown/transformer": "^7.5.0", + "lib0": "^0.2.99", + "remark-frontmatter": "^5.0.0", + "remark-gfm": "^4.0.1", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0", + "y-prosemirror": "^1.2.19", + "y-protocols": "^1.0.6", + "yjs": "^13.6.0" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20250109.0", + "wrangler": "^4.0.0", + "typescript": "^5.3.0" + } +} diff --git a/apps/proof-cloudflare/src/agent-edit-ops.ts b/apps/proof-cloudflare/src/agent-edit-ops.ts new file mode 100644 index 0000000..87850b6 --- /dev/null +++ b/apps/proof-cloudflare/src/agent-edit-ops.ts @@ -0,0 +1,268 @@ +/** + * Agent edit operations pipeline. + * + * Implements append/replace/insert operations on raw markdown, handling + * proof-authored span wrapping, fenced code block detection, and anchor + * resolution through stripped (visible-text) matching. 
+ */ + +import { + buildStrippedIndexMap, + expandRangeToIncludeFullyWrappedAuthoredSpan, + moveIndexPastTrailingAuthoredSpans, +} from './proof-span-strip.js'; + +export type AgentEditOperation = + | { op: 'append'; section: string; content: string } + | { op: 'replace'; search: string; content: string } + | { op: 'insert'; after: string; content: string }; + +export type AgentEditApplyResult = + | { ok: true; markdown: string } + | { ok: false; code: 'ANCHOR_NOT_FOUND'; message: string; opIndex: number }; + +function normalizeNewlines(input: string): string { + return input.replace(/\r\n/g, '\n'); +} + +function isWithinFencedCodeBlock(markdown: string, index: number): boolean { + const src = normalizeNewlines(markdown); + const upto = Math.max(0, Math.min(index, src.length)); + const lines = src.slice(0, upto).split('\n'); + let inFence = false; + for (const line of lines) { + const trimmed = line.trimStart(); + if (trimmed.startsWith('```') || trimmed.startsWith('~~~')) { + inFence = !inFence; + } + } + return inFence; +} + +function contentLooksInline(content: string): boolean { + const normalized = normalizeNewlines(content).trim(); + if (!normalized) return false; + if (normalized.includes('```') || normalized.includes('~~~')) return false; + if (/\n\s*\n/.test(normalized)) return false; // blank line => multi-paragraph / blocky + if (/^\s*#{1,6}\s+/.test(normalized)) return false; + if (/^\s*[-*+]\s+/.test(normalized)) return false; + if (/^\s*\d+\.\s+/.test(normalized)) return false; + if (/^\s*>/.test(normalized)) return false; + return true; +} + +function looksLikeInlineMarkdownFormatting(content: string): boolean { + // Keep authored HTML wrappers away from inline markdown tokens so we don't + // interfere with markdown parser round-tripping. 
+ if (/(^|[^\\])`[^`\n]+`/.test(content)) return true; + if (/(^|[^\\])\*\*[^*\n]+?\*\*/.test(content)) return true; + if (/(^|[^\\])\*[^*\n]+?\*(?!\*)/.test(content)) return true; + if (/(^|[^\\])~~[^~\n]+?~~/.test(content)) return true; + return false; +} + +function maybeWrapAuthored(content: string, by: string | undefined, allow: boolean): string { + if (!allow) return content; + if (!by || !by.trim()) return content; + const normalized = content; + if (/data-proof\s*=\s*("|')authored(")?/i.test(normalized)) return content; + if (!contentLooksInline(normalized)) return content; + if (looksLikeInlineMarkdownFormatting(normalized)) return content; + // Keep as a single inline HTML wrapper so remarkProofMarks can parse it into a proofAuthored mark. + return `${normalized}`; +} + +function computeLineOffsets(src: string): number[] { + const offsets: number[] = [0]; + for (let i = 0; i < src.length; i++) { + if (src.charCodeAt(i) === 10 /* \n */) offsets.push(i + 1); + } + return offsets; +} + +function normalizeHeadingLabel(value: string): string { + const collapsed = normalizeNewlines(value).replace(/\s+/g, ' ').trim().toLowerCase(); + if (!collapsed) return ''; + // Allow section matching to ignore leading ordinal prefixes like: + // "4. Title", "4) Title", or "4.1 Title". + return collapsed.replace(/^\d+(?:\.\d+)*[.)]?\s+/, ''); +} + +function findSectionBoundaryIndex(lines: string[], offsets: number[], headingLineIndex: number): number { + const line = lines[headingLineIndex] ?? ''; + const m = line.match(/^(#{1,6})\s+/); + if (!m) return offsets[headingLineIndex] ?? 0; + const level = m[1].length; + for (let j = headingLineIndex + 1; j < lines.length; j++) { + const m2 = lines[j].match(/^(#{1,6})\s+/); + if (!m2) continue; + const nextLevel = m2[1].length; + if (nextLevel <= level) { + return offsets[j] ?? 0; + } + } + const lastOffset = offsets[offsets.length - 1]; + return typeof lastOffset === 'number' ? lastOffset + (lines[lines.length - 1] ?? 
'').length : 0; +} + +function findHeadingAppendIndex(src: string, section: string): number | null { + const lines = src.split('\n'); + const offsets = computeLineOffsets(src); + + const needle = section.trim(); + if (!needle) return null; + + let fallbackHeadingLineIndex: number | null = null; + const normalizedNeedle = normalizeHeadingLabel(needle); + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + const m = line.match(/^(#{1,6})\s+(.*?)(\s+#*\s*)?$/); + if (!m) continue; + const title = (m[2] || '').trim(); + if (title === needle) { + return findSectionBoundaryIndex(lines, offsets, i); + } + if (fallbackHeadingLineIndex === null && normalizedNeedle && normalizeHeadingLabel(title) === normalizedNeedle) { + fallbackHeadingLineIndex = i; + } + } + + if (fallbackHeadingLineIndex !== null) { + return findSectionBoundaryIndex(lines, offsets, fallbackHeadingLineIndex); + } + + return null; +} + +function spliceAt(src: string, index: number, insert: string): string { + return `${src.slice(0, index)}${insert}${src.slice(index)}`; +} + +function ensureLeadingBreak(insert: string, beforeChar: string | null): string { + if (!insert) return insert; + if (!beforeChar) return insert; + if (beforeChar === '\n') return insert; + return `\n${insert}`; +} + +function ensureTrailingBreak(insert: string, afterChar: string | null): string { + if (!insert) return insert; + if (!afterChar) return insert; + if (afterChar === '\n') return insert; + return `${insert}\n`; +} + +function resolveReplaceRange( + markdown: string, + search: string, +): { start: number; end: number } | null { + if (!search) return null; + + const directIdx = markdown.indexOf(search); + if (directIdx >= 0) { + return expandRangeToIncludeFullyWrappedAuthoredSpan(markdown, directIdx, directIdx + search.length); + } + + const { stripped, map } = buildStrippedIndexMap(markdown); + const strippedIdx = stripped.indexOf(search); + if (strippedIdx < 0) return null; + + const origStart = 
map[strippedIdx] ?? -1; + const origEnd = map[strippedIdx + search.length - 1]; + if (origStart < 0 || origEnd === undefined) return null; + return expandRangeToIncludeFullyWrappedAuthoredSpan(markdown, origStart, origEnd + 1); +} + +function resolveInsertAfterIndex(markdown: string, after: string): number { + if (!after) return -1; + + const directIdx = markdown.indexOf(after); + if (directIdx >= 0) { + return moveIndexPastTrailingAuthoredSpans(markdown, directIdx + after.length); + } + + const { stripped, map } = buildStrippedIndexMap(markdown); + const strippedIdx = stripped.indexOf(after); + if (strippedIdx < 0) return -1; + + const origEnd = map[strippedIdx + after.length - 1]; + if (origEnd === undefined) return -1; + return moveIndexPastTrailingAuthoredSpans(markdown, origEnd + 1); +} + +/** Apply a sequence of agent edit operations to markdown, returning the updated text or first failure. */ +export function applyAgentEditOperations( + markdown: string, + operations: AgentEditOperation[], + options?: { by?: string }, +): AgentEditApplyResult { + let src = normalizeNewlines(markdown ?? ''); + const by = options?.by; + + for (let opIndex = 0; opIndex < operations.length; opIndex++) { + const operation = operations[opIndex]; + if (operation.op === 'append') { + const idx = findHeadingAppendIndex(src, operation.section); + if (idx === null) { + const safeContent = operation.content ?? ''; + const block = `\n\n## ${operation.section.trim()}\n\n${safeContent.trim()}\n`; + src = `${src.replace(/\s+$/g, '')}${block}`; + continue; + } + const allowWrap = !isWithinFencedCodeBlock(src, idx); + const content = maybeWrapAuthored(operation.content ?? '', by, allowWrap); + const insertion = `\n\n${content.trim()}\n`; + src = spliceAt(src, idx, insertion); + continue; + } + + if (operation.op === 'replace') { + const search = operation.search ?? 
''; + const range = resolveReplaceRange(src, search); + if (!range) { + return { + ok: false, + code: 'ANCHOR_NOT_FOUND', + message: `replace anchor not found: ${JSON.stringify(search)}`, + opIndex, + }; + } + const allowWrap = !isWithinFencedCodeBlock(src, range.start); + const content = maybeWrapAuthored(operation.content ?? '', by, allowWrap); + src = `${src.slice(0, range.start)}${content}${src.slice(range.end)}`; + continue; + } + + if (operation.op === 'insert') { + const after = operation.after ?? ''; + const insertAt = resolveInsertAfterIndex(src, after); + + if (insertAt < 0) { + return { + ok: false, + code: 'ANCHOR_NOT_FOUND', + message: `insert anchor not found: ${JSON.stringify(after)}`, + opIndex, + }; + } + const allowWrap = !isWithinFencedCodeBlock(src, insertAt); + const content = maybeWrapAuthored(operation.content ?? '', by, allowWrap); + + // Heuristic: if inserting after a heading line, insert on the next line with spacing. + const beforeChar = insertAt > 0 ? src[insertAt - 1] : null; + const afterChar = insertAt < src.length ? src[insertAt] : null; + let insertion = content; + if (afterChar === '\n') { + insertion = `\n\n${content.trim()}\n`; + } else { + insertion = ensureLeadingBreak(insertion, beforeChar); + insertion = ensureTrailingBreak(insertion, afterChar); + } + + src = spliceAt(src, insertAt, insertion); + continue; + } + } + + return { ok: true, markdown: src }; +} diff --git a/apps/proof-cloudflare/src/auth.ts b/apps/proof-cloudflare/src/auth.ts new file mode 100644 index 0000000..2b63948 --- /dev/null +++ b/apps/proof-cloudflare/src/auth.ts @@ -0,0 +1,75 @@ +/** + * Auth middleware for agent routes in Cloudflare Workers. 
+ * + * Token resolution follows the same fallback chain as the Express server: + * x-share-token → x-bridge-token → Authorization: Bearer → ?token= query param + */ + +import type { DocumentStorage } from './storage-interface.js'; +import type { ShareRole } from './share-types.js'; + +const ROLE_RANK: Record = { + viewer: 0, + commenter: 1, + editor: 2, + owner_bot: 3, +}; + +/** + * Extract the presented secret from the request using the standard + * fallback chain. + */ +export function getPresentedSecret(request: Request): string | null { + const url = new URL(request.url); + + // 1. x-share-token header + const shareToken = request.headers.get('x-share-token'); + if (shareToken) return shareToken; + + // 2. x-bridge-token header + const bridgeToken = request.headers.get('x-bridge-token'); + if (bridgeToken) return bridgeToken; + + // 3. Authorization: Bearer + const authHeader = request.headers.get('authorization'); + if (authHeader?.startsWith('Bearer ')) { + return authHeader.slice(7); + } + + // 4. Query param ?token= (lowest priority — may carry stale tokens from shared URLs) + const queryToken = url.searchParams.get('token'); + if (queryToken) return queryToken; + + return null; +} + +/** + * Check whether the presented secret grants at least the required role. + * Returns the resolved role on success, or null if unauthorized. + */ +export function checkAuth( + storage: DocumentStorage, + slug: string, + secret: string | null, + requiredRole: ShareRole, +): { role: ShareRole } | null { + if (!secret) return null; + + const role = storage.resolveDocumentAccessRole(slug, secret); + if (!role) return null; + + const resolvedRank = ROLE_RANK[role] ?? -1; + const requiredRank = ROLE_RANK[requiredRole] ?? 0; + + if (resolvedRank < requiredRank) return null; + + return { role }; +} + +/** + * Extract the agent ID from request headers. + * Falls back to a default if not provided. 
+ */ +export function getAgentId(request: Request): string { + return request.headers.get('x-agent-id') ?? 'anonymous-agent'; +} diff --git a/apps/proof-cloudflare/src/canonical-projection.ts b/apps/proof-cloudflare/src/canonical-projection.ts new file mode 100644 index 0000000..8143a19 --- /dev/null +++ b/apps/proof-cloudflare/src/canonical-projection.ts @@ -0,0 +1,165 @@ +/** + * Canonical Projection — Y.Doc ↔ Markdown conversion for Durable Objects. + * + * Converts between the live Y.Doc (ProseMirror XmlFragment) and the canonical + * markdown representation used by agent routes. This is the DO-native equivalent + * of the Express server's canonical-document.ts + collab.ts interaction. + */ + +import * as Y from "yjs"; +import { + yXmlFragmentToProseMirrorRootNode, + prosemirrorToYXmlFragment, +} from "y-prosemirror"; +import { + getHeadlessMilkdownParser, + serializeMarkdown, + parseMarkdownWithHtmlFallback, +} from "./milkdown-headless.js"; +import type { HeadlessMilkdownParser } from "./milkdown-headless.js"; + +export type { HeadlessMilkdownParser }; + +// Ephemeral collab span pattern (inserted by Yjs collab during typing) +const EPHEMERAL_COLLAB_SPAN_RE = + /\s*<\/span>/g; + +/** + * Strip empty proof authored spans that appear during live collab editing. + */ +export function stripEphemeralCollabSpans(markdown: string): string { + return markdown.replace(EPHEMERAL_COLLAB_SPAN_RE, ""); +} + +/** + * Derive canonical markdown from the Y.Doc's ProseMirror fragment. + * This is the primary read path for agent state requests. 
+ */ +export async function deriveMarkdownFromYDoc( + doc: Y.Doc, +): Promise<{ markdown: string; parser: HeadlessMilkdownParser }> { + const parser = await getHeadlessMilkdownParser(); + const fragment = doc.getXmlFragment("prosemirror"); + + // If the fragment is empty, fall back to Y.Text "markdown" field + if (fragment.length === 0) { + const markdownText = doc.getText("markdown"); + const text = markdownText.toString(); + if (text) { + return { markdown: stripEphemeralCollabSpans(text), parser }; + } + return { markdown: "", parser }; + } + + try { + const pmDoc = yXmlFragmentToProseMirrorRootNode(fragment, parser.schema); + const raw = await serializeMarkdown(pmDoc); + const markdown = stripEphemeralCollabSpans(raw); + return { markdown, parser }; + } catch { + // If fragment → ProseMirror fails, fall back to the Y.Text markdown field + const markdownText = doc.getText("markdown"); + const fallback = stripEphemeralCollabSpans(markdownText.toString()); + return { markdown: fallback, parser }; + } +} + +/** + * Apply markdown content to the Y.Doc by replacing the ProseMirror fragment. + * This is the primary write path for agent edits and rewrites. + * + * Returns the new markdown (which may differ slightly from input due to + * ProseMirror normalization). + */ +export async function applyMarkdownToYDoc( + doc: Y.Doc, + markdown: string, + origin?: string, +): Promise { + const parser = await getHeadlessMilkdownParser(); + + // Parse markdown → ProseMirror document + const parseResult = parseMarkdownWithHtmlFallback(parser, markdown); + if (!parseResult.doc) { + throw new Error( + `Failed to parse markdown: ${parseResult.error instanceof Error ? 
parseResult.error.message : String(parseResult.error)}`, + ); + } + + const pmDoc = parseResult.doc; + + // Serialize the parsed doc back to get the normalized markdown + const normalized = stripEphemeralCollabSpans(await serializeMarkdown(pmDoc)); + + // Apply to Y.Doc in a transaction + doc.transact(() => { + const fragment = doc.getXmlFragment("prosemirror"); + + // Clear existing fragment + if (fragment.length > 0) { + fragment.delete(0, fragment.length); + } + + // Convert ProseMirror doc to Y.XmlFragment content + prosemirrorToYXmlFragment(pmDoc, fragment); + + // Also update the Y.Text markdown field for compatibility + const markdownText = doc.getText("markdown"); + applyYTextDiff(markdownText, normalized); + }, origin ?? "cf-canonical-projection"); + + return normalized; +} + +/** + * Apply a diff to a Y.Text by finding the minimal edit. + */ +export function applyYTextDiff(ytext: Y.Text, newContent: string): void { + const oldContent = ytext.toString(); + if (oldContent === newContent) return; + + // Find common prefix + let prefixLen = 0; + const minLen = Math.min(oldContent.length, newContent.length); + while (prefixLen < minLen && oldContent[prefixLen] === newContent[prefixLen]) { + prefixLen++; + } + + // Find common suffix (don't overlap with prefix) + let suffixLen = 0; + while ( + suffixLen < minLen - prefixLen && + oldContent[oldContent.length - 1 - suffixLen] === + newContent[newContent.length - 1 - suffixLen] + ) { + suffixLen++; + } + + const deleteLen = oldContent.length - prefixLen - suffixLen; + if (deleteLen > 0) { + ytext.delete(prefixLen, deleteLen); + } + + const insertText = newContent.slice(prefixLen, newContent.length - suffixLen); + if (insertText) { + ytext.insert(prefixLen, insertText); + } +} + +/** + * Compute a SHA-256 checksum of markdown content. + * Used for precondition validation in edit requests. 
+ */ +export async function computeMarkdownChecksum( + markdown: string, +): Promise { + const encoder = new TextEncoder(); + const data = encoder.encode(markdown); + const hashBuffer = await crypto.subtle.digest("SHA-256", data); + const hashArray = new Uint8Array(hashBuffer); + let hex = ""; + for (const byte of hashArray) { + hex += byte.toString(16).padStart(2, "0"); + } + return hex; +} diff --git a/apps/proof-cloudflare/src/document-engine.ts b/apps/proof-cloudflare/src/document-engine.ts new file mode 100644 index 0000000..7cda7b9 --- /dev/null +++ b/apps/proof-cloudflare/src/document-engine.ts @@ -0,0 +1,441 @@ +/** + * DO-native document engine for marks CRUD. + * + * Simplified from server/document-engine.ts. In the DO, state is colocated + * and access is serialized — no need for optimistic concurrency, tombstones, + * or collab invalidation. + */ + +import * as Y from "yjs"; +import type { DocumentEventType } from "./event-types.js"; +import { detectMentions } from "./mention-detection.js"; +import { stripProofSpanTags } from "./proof-span-strip.js"; + +// --------------------------------------------------------------------------- +// Mark types (subset of src/formats/marks.ts) +// --------------------------------------------------------------------------- + +export interface StoredMark { + kind: string; + by: string; + createdAt: string; + quote?: string; + text?: string; + content?: string; + status?: string; + resolved?: boolean; + range?: { from: number; to: number }; + startRel?: string; + endRel?: string; + threadId?: string; + thread?: Array<{ by: string; text: string; at: string }>; + replies?: Array<{ by: string; text: string; at: string }>; + [key: string]: unknown; +} + +export type MarksMap = Record; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function generateMarkId(): string { + return crypto.randomUUID(); +} 
+ +function now(): string { + return new Date().toISOString(); +} + +/** + * Read marks from the Y.Doc's "marks" map. + */ +export function readMarksFromYDoc(doc: Y.Doc): MarksMap { + const marksMap = doc.getMap("marks"); + if (marksMap.size === 0) return {}; + return marksMap.toJSON() as MarksMap; +} + +/** + * Write marks to the Y.Doc's "marks" map. + */ +export function writeMarksToYDoc(doc: Y.Doc, marks: MarksMap): void { + const marksMap = doc.getMap("marks"); + // Clear and rebuild + for (const key of Array.from(marksMap.keys())) { + if (!(key in marks)) { + marksMap.delete(key); + } + } + for (const [key, value] of Object.entries(marks)) { + marksMap.set(key, value); + } +} + +/** + * Find a quote anchor in the markdown text. + * Strips proof span tags before searching so quotes match the visible text. + * Returns the character offset in the stripped text, or -1 if not found. + */ +function findQuoteInMarkdown(markdown: string, quote: string): number { + if (!quote) return -1; + // Strip proof spans to match against visible text + const stripped = stripProofSpanTags(markdown); + const idx = stripped.indexOf(quote); + if (idx !== -1) return idx; + // Try with whitespace normalization + const normalized = stripped.replace(/\s+/g, " "); + const normalizedQuote = quote.replace(/\s+/g, " "); + return normalized.indexOf(normalizedQuote); +} + +// --------------------------------------------------------------------------- +// Mark operations +// --------------------------------------------------------------------------- + +export interface MarkOperationResult { + success: boolean; + markId?: string; + marks: MarksMap; + markdown: string; + error?: string; + errorCode?: string; + statusCode?: number; + eventType?: DocumentEventType; + eventData?: Record; +} + +/** + * Add a comment mark to the document. 
+ */ +export function addComment( + doc: Y.Doc, + input: { by: string; text: string; quote?: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + const markId = generateMarkId(); + const quote = input.quote ?? ""; + + if (quote && findQuoteInMarkdown(markdown, quote) === -1) { + return { + success: false, + marks, + markdown, + error: "Anchor text not found in document", + errorCode: "ANCHOR_NOT_FOUND", + statusCode: 409, + }; + } + + const offset = quote ? findQuoteInMarkdown(markdown, quote) : 0; + + marks[markId] = { + kind: "comment", + by: input.by, + createdAt: now(), + quote, + text: input.text, + threadId: markId, + thread: [], + replies: [], + resolved: false, + startRel: `char:${offset}`, + endRel: `char:${offset + quote.length}`, + }; + + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId, + marks, + markdown, + eventType: "comment.added" as DocumentEventType, + eventData: { markId, by: input.by, quote, text: input.text }, + }; +} + +/** + * Add a suggestion mark (insert, delete, or replace). + */ +export function addSuggestion( + doc: Y.Doc, + kind: "insert" | "delete" | "replace", + input: { by: string; quote: string; content?: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + const markId = generateMarkId(); + + if (!input.quote) { + return { + success: false, + marks, + markdown, + error: "quote is required", + statusCode: 400, + }; + } + + if (findQuoteInMarkdown(markdown, input.quote) === -1) { + return { + success: false, + marks, + markdown, + error: "Anchor text not found in document", + errorCode: "ANCHOR_NOT_FOUND", + statusCode: 409, + }; + } + + const offset = findQuoteInMarkdown(markdown, input.quote); + + marks[markId] = { + kind, + by: input.by, + createdAt: now(), + quote: input.quote, + content: input.content ?? 
"", + status: "pending", + startRel: `char:${offset}`, + endRel: `char:${offset + input.quote.length}`, + }; + + writeMarksToYDoc(doc, marks); + + const eventType = `suggestion.${kind}.added` as DocumentEventType; + + return { + success: true, + markId, + marks, + markdown, + eventType, + eventData: { markId, by: input.by, quote: input.quote, content: input.content }, + }; +} + +/** + * Accept a suggestion — modifies the markdown text. + */ +export function acceptSuggestion( + doc: Y.Doc, + input: { markId: string; by?: string }, +): MarkOperationResult { + const markdownText = doc.getText("markdown"); + const markdown = markdownText.toString(); + const marks = readMarksFromYDoc(doc); + + const mark = marks[input.markId]; + if (!mark) { + return { + success: false, + marks, + markdown, + error: "Mark not found", + statusCode: 404, + }; + } + + // Idempotent — already accepted + if (mark.status === "accepted") { + return { success: true, markId: input.markId, marks, markdown }; + } + + mark.status = "accepted"; + + // Apply the suggestion to the markdown text. + // Work with stripped text (no proof spans) for clean replacement, + // then use that as the new canonical markdown. + const quote = mark.quote ?? ""; + const content = mark.content ?? 
""; + const strippedMarkdown = stripProofSpanTags(markdown); + const idx = strippedMarkdown.indexOf(quote); + + if (idx !== -1 && quote) { + let newMarkdown: string; + if (mark.kind === "insert") { + newMarkdown = strippedMarkdown.slice(0, idx + quote.length) + content + strippedMarkdown.slice(idx + quote.length); + } else if (mark.kind === "delete") { + newMarkdown = strippedMarkdown.slice(0, idx) + strippedMarkdown.slice(idx + quote.length); + } else { + // replace + newMarkdown = strippedMarkdown.slice(0, idx) + content + strippedMarkdown.slice(idx + quote.length); + } + + // Apply diff to Y.Text + doc.transact(() => { + const oldLen = markdownText.length; + if (oldLen > 0) markdownText.delete(0, oldLen); + markdownText.insert(0, newMarkdown); + }, "cf-accept-suggestion"); + + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId: input.markId, + marks, + markdown: newMarkdown, + eventType: "suggestion.accepted" as DocumentEventType, + eventData: { markId: input.markId, status: "accepted", by: input.by ?? "unknown" }, + }; + } + + // Quote not found — still accept the status change but warn + writeMarksToYDoc(doc, marks); + return { + success: true, + markId: input.markId, + marks, + markdown, + eventType: "suggestion.accepted" as DocumentEventType, + eventData: { markId: input.markId, status: "accepted", by: input.by ?? "unknown" }, + }; +} + +/** + * Reject a suggestion — markdown unchanged, just status update. 
+ */ +export function rejectSuggestion( + doc: Y.Doc, + input: { markId: string; by?: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + + const mark = marks[input.markId]; + if (!mark) { + return { + success: false, + marks, + markdown, + error: "Mark not found", + statusCode: 404, + }; + } + + if (mark.status === "rejected") { + return { success: true, markId: input.markId, marks, markdown }; + } + + mark.status = "rejected"; + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId: input.markId, + marks, + markdown, + eventType: "suggestion.rejected" as DocumentEventType, + eventData: { markId: input.markId, status: "rejected", by: input.by ?? "unknown" }, + }; +} + +/** + * Reply to a comment thread. + */ +export function replyComment( + doc: Y.Doc, + input: { markId: string; by: string; text: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + + const mark = marks[input.markId]; + if (!mark) { + return { + success: false, + marks, + markdown, + error: "Mark not found", + statusCode: 404, + }; + } + + const reply = { by: input.by, text: input.text, at: now() }; + if (!mark.thread) mark.thread = []; + if (!mark.replies) mark.replies = []; + mark.thread.push(reply); + mark.replies.push(reply); + + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId: input.markId, + marks, + markdown, + eventType: "comment.replied" as DocumentEventType, + eventData: { markId: input.markId, by: input.by, text: input.text }, + }; +} + +/** + * Resolve a comment. 
+ */ +export function resolveComment( + doc: Y.Doc, + input: { markId: string; by?: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + + const mark = marks[input.markId]; + if (!mark) { + return { + success: false, + marks, + markdown, + error: "Mark not found", + statusCode: 404, + }; + } + + mark.resolved = true; + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId: input.markId, + marks, + markdown, + eventType: "comment.resolved" as DocumentEventType, + eventData: { markId: input.markId, by: input.by ?? "unknown" }, + }; +} + +/** + * Unresolve a comment. + */ +export function unresolveComment( + doc: Y.Doc, + input: { markId: string; by?: string }, +): MarkOperationResult { + const markdown = doc.getText("markdown").toString(); + const marks = readMarksFromYDoc(doc); + + const mark = marks[input.markId]; + if (!mark) { + return { + success: false, + marks, + markdown, + error: "Mark not found", + statusCode: 404, + }; + } + + mark.resolved = false; + writeMarksToYDoc(doc, marks); + + return { + success: true, + markId: input.markId, + marks, + markdown, + eventType: "comment.unresolved" as DocumentEventType, + eventData: { markId: input.markId, by: input.by ?? "unknown" }, + }; +} diff --git a/apps/proof-cloudflare/src/document-ops.ts b/apps/proof-cloudflare/src/document-ops.ts new file mode 100644 index 0000000..a1baad4 --- /dev/null +++ b/apps/proof-cloudflare/src/document-ops.ts @@ -0,0 +1,101 @@ +/** + * Document operation parsing and authorization for the agent `ops` endpoint. + * + * Parses incoming operation requests into typed operations, resolves them to + * internal mark/rewrite routes, and checks role-based access control. 
+ */ + +import type { ShareRole } from './share-types.js'; + +export type DocumentOpType = + | 'comment.add' + | 'comment.reply' + | 'comment.resolve' + | 'comment.unresolve' + | 'suggestion.add' + | 'suggestion.accept' + | 'suggestion.reject' + | 'rewrite.apply'; + +function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null && !Array.isArray(value); +} + +/** Parse and validate an agent's document operation request into a typed operation. */ +export function parseDocumentOpRequest(body: unknown): { op: DocumentOpType; payload: Record } | { error: string } { + const raw = isRecord(body) ? body : {}; + const op = (raw.type ?? raw.op) as DocumentOpType | undefined; + if (!op) return { error: 'Missing operation type' }; + + const payload = (isRecord(raw.payload) ? { ...(raw.payload as Record) } : { ...raw }) as Record; + delete payload.type; + delete payload.op; + delete payload.payload; + + return { op, payload }; +} + +/** Map an operation type to its internal route (e.g. comment.add → /marks/comment). */ +export function resolveDocumentOpRoute( + opType: DocumentOpType, + payload: Record, +): { method: 'POST'; path: string; body: Record } | null { + switch (opType) { + case 'comment.add': + return { method: 'POST', path: '/marks/comment', body: payload }; + case 'comment.reply': + return { method: 'POST', path: '/marks/reply', body: payload }; + case 'comment.resolve': + return { method: 'POST', path: '/marks/resolve', body: payload }; + case 'comment.unresolve': + return { method: 'POST', path: '/marks/unresolve', body: payload }; + case 'suggestion.accept': + return { method: 'POST', path: '/marks/accept', body: payload }; + case 'suggestion.reject': + return { method: 'POST', path: '/marks/reject', body: payload }; + case 'rewrite.apply': + return { method: 'POST', path: '/rewrite', body: payload }; + case 'suggestion.add': { + const kind = typeof payload.kind === 'string' ? 
payload.kind : ''; + if (kind === 'insert') return { method: 'POST', path: '/marks/suggest-insert', body: payload }; + if (kind === 'delete') return { method: 'POST', path: '/marks/suggest-delete', body: payload }; + if (kind === 'replace') return { method: 'POST', path: '/marks/suggest-replace', body: payload }; + return null; + } + default: + return null; + } +} + +/** Check whether the caller's role permits the requested operation. Returns null if allowed, error string if denied. */ +export function authorizeDocumentOp( + type: DocumentOpType, + accessRole: ShareRole | null, + ownerAuthorized: boolean, + shareState: string, +): string | null { + if (shareState === 'DELETED') return 'Document deleted'; + if (shareState === 'REVOKED' && !ownerAuthorized) return 'Document access has been revoked'; + + if (ownerAuthorized || accessRole === 'owner_bot') return null; + if (shareState !== 'ACTIVE') return 'Document is paused'; + + const isEditor = accessRole === 'editor'; + const isCommenter = accessRole === 'commenter'; + switch (type) { + case 'comment.add': + case 'comment.reply': + case 'comment.resolve': + case 'comment.unresolve': + case 'suggestion.add': + if (isEditor || isCommenter) return null; + return 'Insufficient role for operation'; + case 'suggestion.accept': + case 'suggestion.reject': + case 'rewrite.apply': + if (isEditor) return null; + return 'Insufficient role for operation'; + default: + return 'Unsupported operation'; + } +} diff --git a/apps/proof-cloudflare/src/document-session.ts b/apps/proof-cloudflare/src/document-session.ts new file mode 100644 index 0000000..9a0509e --- /dev/null +++ b/apps/proof-cloudflare/src/document-session.ts @@ -0,0 +1,1822 @@ +import { DurableObject } from "cloudflare:workers"; +import * as Y from "yjs"; +import * as syncProtocol from "y-protocols/sync"; +import * as awarenessProtocol from "y-protocols/awareness"; +import * as encoding from "lib0/encoding"; +import * as decoding from "lib0/decoding"; + +import { 
deriveMarkdownFromYDoc, applyMarkdownToYDoc } from "./canonical-projection.js"; +import { stripProofSpanTags } from "./proof-span-strip.js"; +import { DODocumentStorage } from "./storage-do.js"; +import { + addComment, + addSuggestion, + acceptSuggestion, + rejectSuggestion, + replyComment, + resolveComment, + unresolveComment, + type MarkOperationResult, +} from "./document-engine.js"; +import { applyAgentEditOperations, type AgentEditOperation } from "./agent-edit-ops.js"; +import { stripProofSpanTags as stripSpansForEdit } from "./proof-span-strip.js"; +import { getPresentedSecret, checkAuth, getAgentId } from "./auth.js"; +import { + getIdempotencyKey, + checkIdempotency, + storeIdempotencyResult, +} from "./idempotency.js"; + +export interface Env { + DOCUMENT_SESSION: DurableObjectNamespace; + CATALOG_DB: D1Database; + ASSETS: Fetcher; +} + +/** + * Hocuspocus wire protocol message types. + * Every message is framed as: varString(documentName) + varUint(type) + payload + */ +const MessageType = { + Sync: 0, + Awareness: 1, + Auth: 2, + QueryAwareness: 3, + Stateless: 5, + CLOSE: 7, + SyncStatus: 8, +} as const; + +/** Auth sub-message types (nested inside MessageType.Auth). */ +const AuthMessageType = { + Token: 0, + PermissionDenied: 1, + Authenticated: 2, +} as const; + +/** Number of incremental updates before compacting into a single snapshot. */ +const COMPACTION_THRESHOLD = 200; + +/** Milliseconds to wait after last client disconnects before running cleanup. */ +const CLEANUP_DELAY_MS = 30_000; + +/** + * Durable Object managing a single document's collaborative session. 
+ * + * Each instance is keyed by document slug and owns: + * - A Y.Doc with Hocuspocus-compatible Yjs sync over WebSocket + * - SQLite storage for persisted document state + * - Awareness broadcasting for cursors/presence + */ +export class DocumentSession extends DurableObject { + private doc: Y.Doc | null = null; + private awareness: awarenessProtocol.Awareness | null = null; + private updateCount = 0; + /** The document name (slug) used for Hocuspocus protocol framing. */ + private roomName: string | null = null; + /** Per-document storage for agent routes (events, idempotency, access). */ + private docStorage: DODocumentStorage | null = null; + /** Maps each WebSocket to the awareness client IDs it controls. */ + private socketAwarenessClients = new Map>(); + + constructor(ctx: DurableObjectState, env: Env) { + super(ctx, env); + this.initStorage(); + } + + /** Get or create the DODocumentStorage for this document's slug. */ + private ensureDocStorage(slug: string): DODocumentStorage { + if (this.docStorage) return this.docStorage; + this.docStorage = new DODocumentStorage({ ctx: this.ctx, slug }); + return this.docStorage; + } + + private initStorage(): void { + const sql = this.ctx.storage.sql; + + // Yjs persistence tables + sql.exec(`CREATE TABLE IF NOT EXISTS document_state ( + key TEXT PRIMARY KEY, + value BLOB NOT NULL, + updated_at TEXT NOT NULL DEFAULT (datetime('now')) + )`); + sql.exec(`CREATE TABLE IF NOT EXISTS yjs_updates ( + id INTEGER PRIMARY KEY, + update_data BLOB NOT NULL, + created_at TEXT NOT NULL DEFAULT (datetime('now')) + )`); + + // Agent bridge tables (events, idempotency, access control) + // Initialized here at DO construction so they don't add CPU cost during requests. 
+ sql.exec(`CREATE TABLE IF NOT EXISTS documents ( + slug TEXT PRIMARY KEY, doc_id TEXT, title TEXT, + markdown TEXT NOT NULL DEFAULT '', marks TEXT NOT NULL DEFAULT '{}', + revision INTEGER NOT NULL DEFAULT 0, y_state_version INTEGER NOT NULL DEFAULT 0, + share_state TEXT NOT NULL DEFAULT 'ACTIVE', access_epoch INTEGER NOT NULL DEFAULT 0, + active INTEGER NOT NULL DEFAULT 1, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')), deleted_at TEXT + )`); + sql.exec(`CREATE TABLE IF NOT EXISTS document_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, document_slug TEXT NOT NULL, + document_revision INTEGER, event_type TEXT NOT NULL, + event_data TEXT NOT NULL DEFAULT '{}', actor TEXT NOT NULL DEFAULT '', + idempotency_key TEXT, created_at TEXT NOT NULL DEFAULT (datetime('now')), + acked_by TEXT, acked_at TEXT + )`); + sql.exec(`CREATE TABLE IF NOT EXISTS mutation_outbox ( + id INTEGER PRIMARY KEY AUTOINCREMENT, document_slug TEXT NOT NULL, + document_revision INTEGER, event_id INTEGER, event_type TEXT NOT NULL, + event_data TEXT NOT NULL DEFAULT '{}', actor TEXT NOT NULL DEFAULT '', + idempotency_key TEXT, created_at TEXT NOT NULL DEFAULT (datetime('now')), + delivered_at TEXT + )`); + sql.exec(`CREATE TABLE IF NOT EXISTS idempotency_keys ( + idempotency_key TEXT NOT NULL, document_slug TEXT NOT NULL, + route TEXT NOT NULL, response_json TEXT NOT NULL, + request_hash TEXT, status_code INTEGER NOT NULL DEFAULT 200, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + PRIMARY KEY (idempotency_key, document_slug, route) + )`); + sql.exec(`CREATE TABLE IF NOT EXISTS document_access ( + token_id TEXT PRIMARY KEY, document_slug TEXT NOT NULL, + role TEXT NOT NULL, secret_hash TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT (datetime('now')), revoked_at TEXT + )`); + } + + /** + * Lazily initialize the Y.Doc from persisted SQLite state. + * Called on first WebSocket connection or HTTP snapshot request. 
+ */ + private ensureDoc(): Y.Doc { + if (this.doc) return this.doc; + + const doc = new Y.Doc(); + const sql = this.ctx.storage.sql; + + // Load base snapshot if present + const snapshotRows = sql + .exec("SELECT value FROM document_state WHERE key = 'yjs_snapshot'") + .toArray(); + + if (snapshotRows.length > 0) { + const snapshotValue = snapshotRows[0]["value"]; + if (snapshotValue instanceof ArrayBuffer) { + Y.applyUpdate(doc, new Uint8Array(snapshotValue)); + } + } + + // Apply incremental updates on top of snapshot + const updateRows = sql + .exec("SELECT update_data FROM yjs_updates ORDER BY id ASC") + .toArray(); + + for (const row of updateRows) { + const updateValue = row["update_data"]; + if (updateValue instanceof ArrayBuffer) { + Y.applyUpdate(doc, new Uint8Array(updateValue)); + } + } + + this.updateCount = updateRows.length; + + // Listen for updates from any source (local apply or remote sync) + // and persist them. Only websocket-origin updates are broadcast. + doc.on("update", (update: Uint8Array, origin: unknown) => { + // Updates applied during load (origin undefined/null) are already persisted. + if (origin !== undefined && origin !== null) { + this.persistUpdate(update); + } + if (origin instanceof WebSocket) { + this.broadcastUpdate(update, origin); + } + }); + + this.doc = doc; + this.awareness = new awarenessProtocol.Awareness(doc); + // Disable the awareness check interval — DO hibernation handles lifecycle + if (this.awareness._checkInterval) { + clearInterval(this.awareness._checkInterval); + this.awareness._checkInterval = 0; + } + + return doc; + } + + async fetch(request: Request): Promise { + const url = new URL(request.url); + + // WebSocket upgrade + if (request.headers.get("Upgrade") === "websocket") { + return this.handleWebSocketUpgrade(url); + } + + // Agent bridge routes — must match before generic /snapshot, /content etc. 
+ const agentMatch = url.pathname.match(/\/agent\/([^/]+)\/(.+)$/); + if (agentMatch) { + return this.handleAgentRoute(request, url, agentMatch[2]); + } + + // HTTP API endpoints (editor-facing) + if (url.pathname.endsWith("/content")) { + return this.handleContentRequest(request); + } + + if (url.pathname.endsWith("/snapshot")) { + return this.handleSnapshotRequest(); + } + + if (url.pathname.endsWith("/open-context")) { + return this.handleOpenContext(request); + } + + if (url.pathname.endsWith("/collab-session")) { + return this.handleCollabSession(request); + } + + if (url.pathname.endsWith("/collab-refresh")) { + return this.handleCollabSession(request); + } + + // GET /documents/:slug (bare) — return document data + if (url.pathname.match(/\/documents\/[^/]+\/?$/) && request.method === "GET") { + return this.handleGetDocument(request); + } + + // PUT /documents/:slug/title + if (url.pathname.endsWith("/title") && request.method === "PUT") { + return this.handleUpdateTitle(request); + } + + // POST /documents/:slug/events/ack + if (url.pathname.endsWith("/events/ack") && request.method === "POST") { + return this.handleEventsAck(request); + } + + // Document lifecycle endpoints + if (url.pathname.endsWith("/pause") && request.method === "POST") { + return this.handleLifecycle(request, "PAUSED"); + } + if (url.pathname.endsWith("/resume") && request.method === "POST") { + return this.handleLifecycle(request, "ACTIVE"); + } + if (url.pathname.endsWith("/revoke") && request.method === "POST") { + return this.handleLifecycle(request, "REVOKED"); + } + if (url.pathname.endsWith("/delete") && request.method === "POST") { + return this.handleLifecycle(request, "DELETED"); + } + if (request.method === "DELETE" && url.pathname.match(/\/documents\/[^/]+\/?$/)) { + return this.handleLifecycle(request, "DELETED"); + } + + // GET /documents/:slug/events/pending (real implementation) + if (url.pathname.endsWith("/events/pending") && request.method === "GET") { + return 
this.handleEventsPending(request); + } + + return Response.json( + { error: "Not found", path: url.pathname }, + { status: 404 }, + ); + } + + /** + * Returns the open-context payload the editor needs to initialize. + * Mirrors the shape from the Express server's /documents/:slug/open-context. + */ + private async handleOpenContext(request: Request): Promise { + const url = new URL(request.url); + const slug = this.getSlugFromRequest(url); + const metadata = await this.readDocumentMetadata(slug); + + // Derive canonical markdown from the ProseMirror fragment + const ydoc = this.ensureDoc(); + let markdown = ""; + try { + const projection = await deriveMarkdownFromYDoc(ydoc); + markdown = projection.markdown; + } catch { + markdown = ydoc.getText("markdown").toString(); + } + + // Read marks from DODocumentStorage if available + const storage = this.ensureDocStorage(slug); + let marks: Record = {}; + try { + const marksJson = storage.getMarks(slug); + if (marksJson) marks = JSON.parse(marksJson); + } catch { + // Ignore parse errors + } + + return Response.json({ + success: true, + doc: { + slug, + docId: metadata.docId, + title: metadata.title, + markdown, + marks, + shareState: "ACTIVE", + active: true, + createdAt: metadata.createdAt, + updatedAt: metadata.updatedAt, + viewers: this.getWebSockets().length, + }, + session: this.buildCollabSession(url, slug), + capabilities: { + canRead: true, + canEdit: true, + canComment: true, + canSuggest: true, + canApprove: false, + canDelete: false, + canPause: false, + canRevoke: false, + canManageAccess: false, + }, + links: { + webUrl: `${url.origin}/d/${encodeURIComponent(slug)}`, + snapshotUrl: null, + }, + }); + } + + private handleCollabSession(request: Request): Response { + const url = new URL(request.url); + const slug = this.getSlugFromRequest(url); + + return Response.json({ + success: true, + session: this.buildCollabSession(url, slug), + capabilities: { + canRead: true, + canEdit: true, + canComment: true, + }, 
+ }); + } + + /** Build a CollabSessionInfo matching the shape the editor validates. */ + private buildCollabSession(url: URL, slug: string) { + // Use path-based routing: /ws/ + // The HocuspocusProvider strips ?slug= query params from collabWsUrl, + // but preserves the path. The worker extracts the slug from the path. + const wsUrl = `${url.origin.replace("http", "ws")}/ws/${encodeURIComponent(slug)}`; + return { + docId: slug, + slug, + role: "editor", + shareState: "ACTIVE", + accessEpoch: 1, + syncProtocol: "pm-yjs-v1", + collabWsUrl: wsUrl, + token: "cf-session", + snapshotVersion: 0, + }; + } + + private async handleGetDocument(request: Request): Promise { + const url = new URL(request.url); + const slug = this.getSlugFromRequest(url); + const metadata = await this.readDocumentMetadata(slug); + + const ydoc = this.ensureDoc(); + let markdown = ""; + try { + const projection = await deriveMarkdownFromYDoc(ydoc); + markdown = projection.markdown; + } catch { + markdown = ydoc.getText("markdown").toString(); + } + + const storage = this.ensureDocStorage(slug); + let marks: Record = {}; + try { + const marksJson = storage.getMarks(slug); + if (marksJson) marks = JSON.parse(marksJson); + } catch { + // Ignore parse errors + } + + return Response.json({ + success: true, + slug, + docId: metadata.docId, + title: metadata.title, + markdown, + marks, + shareState: "ACTIVE", + active: true, + createdAt: metadata.createdAt, + updatedAt: metadata.updatedAt, + }); + } + + private async handleUpdateTitle(request: Request): Promise { + const body = await request.json().catch(() => ({})) as Record; + const url = new URL(request.url); + const slug = this.getSlugFromRequest(url); + const title = typeof body.title === "string" ? 
body.title : ""; + await this.env.CATALOG_DB.prepare( + "UPDATE documents SET title = ?, updated_at = datetime('now') WHERE slug = ?", + ) + .bind(title, slug) + .run(); + return Response.json({ + success: true, + title, + updatedAt: new Date().toISOString(), + }); + } + + private async readDocumentMetadata(slug: string): Promise<{ + docId: string; + title: string; + createdAt: string; + updatedAt: string; + }> { + const row = await this.env.CATALOG_DB.prepare( + "SELECT id, title, created_at, updated_at FROM documents WHERE slug = ?", + ) + .bind(slug) + .first>(); + + return { + docId: typeof row?.id === "string" ? row.id : slug, + title: typeof row?.title === "string" ? row.title : "", + createdAt: typeof row?.created_at === "string" ? row.created_at : new Date().toISOString(), + updatedAt: typeof row?.updated_at === "string" ? row.updated_at : new Date().toISOString(), + }; + } + + private getSlugFromRequest(url: URL): string { + // Extract slug from /api/documents/:slug/... or /documents/:slug/... + const match = url.pathname.match(/\/documents\/([^/]+)/); + return match ? decodeURIComponent(match[1]) : "unknown"; + } + + /** Extract slug from /api/agent/:slug/... paths. */ + private getAgentSlugFromRequest(url: URL): string { + const match = url.pathname.match(/\/agent\/([^/]+)/); + return match ? decodeURIComponent(match[1]) : "unknown"; + } + + // --------------------------------------------------------------------------- + // Events (real implementation using DODocumentStorage) + // --------------------------------------------------------------------------- + + private handleEventsPending(request: Request): Response { + try { + const url = new URL(request.url); + // Try document path first, then agent path for slug extraction + const docSlug = url.pathname.match(/\/documents\/([^/]+)/) + ? this.getSlugFromRequest(url) + : null; + const slug = docSlug ?? 
this.getAgentSlugFromRequest(url); + const storage = this.ensureDocStorage(slug); + + const afterParam = url.searchParams.get("after") ?? "0"; + const afterId = Number.parseInt(afterParam, 10); + const cursor = Number.isFinite(afterId) ? Math.max(0, afterId) : 0; + const limit = Math.min( + Number.parseInt(url.searchParams.get("limit") ?? "50", 10) || 50, + 500, + ); + + const events = storage.listDocumentEvents(slug, cursor, limit); + const nextCursor = events.length > 0 + ? events[events.length - 1].id + : cursor; + + return Response.json({ + success: true, + events: events.map((e) => ({ + id: e.id, + type: e.event_type, + data: typeof e.event_data === "string" ? JSON.parse(e.event_data || "{}") : e.event_data, + actor: e.actor, + createdAt: e.created_at, + ackedAt: e.acked_at, + ackedBy: e.acked_by, + })), + cursor: nextCursor, + }); + } catch (error) { + return Response.json({ + success: false, + error: error instanceof Error ? error.message : String(error), + }, { status: 500 }); + } + } + + private async handleEventsAck(request: Request): Promise { + const url = new URL(request.url); + const docSlug = url.pathname.match(/\/documents\/([^/]+)/) + ? this.getSlugFromRequest(url) + : null; + const slug = docSlug ?? this.getAgentSlugFromRequest(url); + const storage = this.ensureDocStorage(slug); + const body = await request.json().catch(() => ({})) as Record; + + const upToId = typeof body.upToId === "number" ? body.upToId : 0; + const ackedBy = typeof body.by === "string" ? 
body.by : getAgentId(request); + + const acked = storage.ackDocumentEvents(slug, upToId, ackedBy); + return Response.json({ success: true, acked }); + } + + // --------------------------------------------------------------------------- + // Agent bridge route dispatcher + // --------------------------------------------------------------------------- + + private async handleAgentRoute( + request: Request, + url: URL, + subPath: string, + ): Promise { + const slug = this.getAgentSlugFromRequest(url); + + // Auth check — if a token is presented, verify it and enforce role. + // If no token, allow access (open/no-auth mode per discovery endpoint). + const secret = getPresentedSecret(request); + if (secret) { + const storage = this.ensureDocStorage(slug); + const requiredRole = getRequiredRole(subPath, request.method); + const auth = checkAuth(storage, slug, secret, requiredRole); + if (!auth) { + return Response.json( + { error: "Insufficient permissions", requiredRole }, + { status: 403 }, + ); + } + } + + // Agent events + if (subPath === "events/pending" && request.method === "GET") { + return this.handleEventsPending(request); + } + if (subPath === "events/ack" && request.method === "POST") { + return this.handleEventsAck(request); + } + + // --- Routes below here return 501 until implemented in later phases --- + + // Phase 3: Read routes + if (subPath === "state" && request.method === "GET") { + return this.handleAgentState(request, url, slug); + } + if (subPath === "snapshot" && request.method === "GET") { + return this.handleAgentSnapshot(request, url, slug); + } + + // Phase 4: Marks CRUD + if (subPath.startsWith("marks/") && request.method === "POST") { + return this.handleMarksRoute(request, slug, subPath.slice(6)); + } + + // Phase 5: Edits + if (subPath === "edit" && request.method === "POST") { + return this.handleAgentEdit(request, slug); + } + if (subPath === "edit/v2" && request.method === "POST") { + return this.handleAgentEditV2(request, slug); + } + 
+ // Phase 6: Rewrite, ops, presence + if (subPath === "rewrite" && request.method === "POST") { + return this.handleAgentRewrite(request, slug); + } + if (subPath === "ops" && request.method === "POST") { + return this.handleAgentOps(request, slug); + } + if (subPath === "presence" && request.method === "POST") { + return this.handleAgentPresence(request, slug); + } + if (subPath === "presence/disconnect" && request.method === "POST") { + return this.handleAgentPresenceDisconnect(request); + } + + return Response.json( + { error: "Not found", path: url.pathname }, + { status: 404 }, + ); + } + + // --------------------------------------------------------------------------- + // Agent state + snapshot handlers + // --------------------------------------------------------------------------- + + private async handleAgentState( + request: Request, + url: URL, + slug: string, + ): Promise { + const metadata = await this.readDocumentMetadata(slug); + const ydoc = this.ensureDoc(); + + // Derive markdown from the ProseMirror fragment (what the browser sees) + let markdown = ""; + let projectionSource = "fragment"; + try { + const projection = await deriveMarkdownFromYDoc(ydoc); + markdown = stripProofSpanTags(projection.markdown); + } catch (err) { + // Fall back to Y.Text if fragment conversion fails + projectionSource = "ytext-fallback"; + markdown = stripProofSpanTags(ydoc.getText("markdown").toString()); + } + + // Read marks from the Y.Doc marks map if available + let marks: Record = {}; + try { + const marksMap = ydoc.getMap("marks"); + if (marksMap.size > 0) { + marks = marksMap.toJSON(); + } + } catch { + // Ignore + } + + const revision = this.updateCount; + const baseUrl = url.origin; + + return Response.json({ + success: true, + slug, + docId: metadata.docId, + title: metadata.title, + markdown, + marks, + revision, + shareState: "ACTIVE", + stage: "mutation_ready", + capabilities: { + snapshotV2: true, + editV2: true, + topLevelOnly: false, + mutationReady: 
true, + }, + _links: { + self: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/state`, + edit: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/edit/v2`, + snapshot: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/snapshot`, + ops: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/ops`, + presence: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/presence`, + events: `${baseUrl}/api/agent/${encodeURIComponent(slug)}/events/pending`, + }, + agent: { + name: getAgentId(request), + color: "#6366f1", + avatar: "", + }, + viewers: this.getWebSockets().length, + createdAt: metadata.createdAt, + updatedAt: metadata.updatedAt, + }); + } + + private async handleAgentSnapshot( + _request: Request, + url: URL, + slug: string, + ): Promise { + const metadata = await this.readDocumentMetadata(slug); + const ydoc = this.ensureDoc(); + + let markdown = ""; + try { + const projection = await deriveMarkdownFromYDoc(ydoc); + markdown = stripProofSpanTags(projection.markdown); + } catch { + markdown = stripProofSpanTags(ydoc.getText("markdown").toString()); + } + const revision = this.updateCount; + + // Read marks from Y.Doc + let marks: Record = {}; + try { + const marksMap = ydoc.getMap("marks"); + if (marksMap.size > 0) { + marks = marksMap.toJSON(); + } + } catch { + // Ignore + } + + return Response.json({ + success: true, + slug, + revision, + title: metadata.title, + markdown, + marks, + y_state_version: 0, + access_epoch: 0, + collab: { + status: "converged", + markdownStatus: "converged", + fragmentStatus: "converged", + canonicalStatus: "converged", + }, + pendingEvents: { + count: 0, + types: [], + }, + _links: { + self: `${url.origin}/api/agent/${encodeURIComponent(slug)}/snapshot`, + state: `${url.origin}/api/agent/${encodeURIComponent(slug)}/state`, + edit: `${url.origin}/api/agent/${encodeURIComponent(slug)}/edit/v2`, + }, + }); + } + + // --------------------------------------------------------------------------- + // Document lifecycle handler + // 
--------------------------------------------------------------------------- + + private async handleLifecycle( + request: Request, + newState: string, + ): Promise { + const url = new URL(request.url); + const slug = this.getSlugFromRequest(url); + + await this.env.CATALOG_DB.prepare( + "UPDATE documents SET title = COALESCE(title, ''), updated_at = datetime('now') WHERE slug = ?", + ) + .bind(slug) + .run(); + + // Persist the lifecycle state in DO SQLite so it survives reconnects + const sql = this.ctx.storage.sql; + sql.exec( + `CREATE TABLE IF NOT EXISTS document_meta (key TEXT PRIMARY KEY, value TEXT NOT NULL)`, + ); + sql.exec( + `INSERT OR REPLACE INTO document_meta (key, value) VALUES ('share_state', ?)`, + newState, + ); + + // For revoke/delete, close all WebSocket connections + if (newState === "REVOKED" || newState === "DELETED") { + for (const ws of this.getWebSockets()) { + try { ws.close(1000, `Document ${newState.toLowerCase()}`); } catch { /* ignore */ } + } + } + + return Response.json({ + success: true, + slug, + shareState: newState, + updatedAt: new Date().toISOString(), + }); + } + + // --------------------------------------------------------------------------- + // Rewrite, ops, and presence handlers + // --------------------------------------------------------------------------- + + /** + * Full document rewrite — replaces the entire markdown content. + */ + private async handleAgentRewrite( + request: Request, + slug: string, + ): Promise { + const body = await request.json().catch(() => ({})) as Record; + const doc = this.ensureDoc(); + const markdownText = doc.getText("markdown"); + + // Accept either `content` (full replacement) or `markdown` field + const newContent = typeof body.content === "string" + ? body.content + : typeof body.markdown === "string" + ? 
body.markdown + : null; + + if (newContent === null) { + return Response.json( + { success: false, error: "content or markdown field required" }, + { status: 400 }, + ); + } + + // Check live client gate (unless force=true) + const force = body.force === true; + if (!force && this.getWebSockets().length > 0) { + return Response.json( + { success: false, error: "REWRITE_BLOCKED_BY_LIVE_CLIENTS", liveClients: this.getWebSockets().length }, + { status: 409 }, + ); + } + + // Apply rewrite to ProseMirror fragment + let finalMarkdown: string; + try { + finalMarkdown = await applyMarkdownToYDoc(doc, newContent, "cf-agent-rewrite"); + } catch { + const markdownText = doc.getText("markdown"); + doc.transact(() => { + const oldLen = markdownText.length; + if (oldLen > 0) markdownText.delete(0, oldLen); + markdownText.insert(0, newContent); + }, "cf-agent-rewrite"); + finalMarkdown = newContent; + } + + return Response.json({ + success: true, + revision: this.updateCount, + markdown: finalMarkdown, + }); + } + + /** + * Ops meta-dispatcher — routes { type, payload } to marks/edit handlers. + */ + private async handleAgentOps( + request: Request, + slug: string, + ): Promise { + const body = await request.json().catch(() => ({})) as Record; + const opType = typeof body.type === "string" ? body.type : ""; + const payload = typeof body.payload === "object" && body.payload !== null + ? body.payload as Record + : body; + + // Build a new request with the extracted payload so downstream handlers + // can call request.json() without hitting an already-consumed body. 
+ const forwardRequest = new Request(request.url, { + method: request.method, + headers: request.headers, + body: JSON.stringify(payload), + }); + + // Route to appropriate handler based on op type + switch (opType) { + case "comment.add": + return this.handleMarksRoute(forwardRequest, slug, "comment"); + case "suggest.replace": + return this.handleMarksRoute(forwardRequest, slug, "suggest-replace"); + case "suggest.insert": + return this.handleMarksRoute(forwardRequest, slug, "suggest-insert"); + case "suggest.delete": + return this.handleMarksRoute(forwardRequest, slug, "suggest-delete"); + case "suggest.accept": + return this.handleMarksRoute(forwardRequest, slug, "accept"); + case "suggest.reject": + return this.handleMarksRoute(forwardRequest, slug, "reject"); + case "comment.reply": + return this.handleMarksRoute(forwardRequest, slug, "reply"); + case "comment.resolve": + return this.handleMarksRoute(forwardRequest, slug, "resolve"); + case "comment.unresolve": + return this.handleMarksRoute(forwardRequest, slug, "unresolve"); + case "rewrite": + return this.handleAgentRewrite(forwardRequest, slug); + default: + return Response.json( + { success: false, error: `Unknown operation type: ${opType}` }, + { status: 400 }, + ); + } + } + + /** + * Set agent presence in the document's awareness. + */ + private handleAgentPresence( + request: Request, + slug: string, + ): Response { + const agentId = getAgentId(request); + + if (this.awareness) { + this.awareness.setLocalStateField("agent", { + id: agentId, + status: "active", + timestamp: Date.now(), + }); + } + + return Response.json({ success: true }); + } + + /** + * Remove agent presence from the document. 
+ */ + private handleAgentPresenceDisconnect( + request: Request, + ): Response { + if (this.awareness) { + this.awareness.setLocalStateField("agent", null); + } + + return Response.json({ success: true }); + } + + // --------------------------------------------------------------------------- + // Agent edit handlers (v1 + v2) + // --------------------------------------------------------------------------- + + /** + * Edit v1: text-level operations (append, replace, insert). + * Uses agent-edit-ops.ts for string-level markdown manipulation. + */ + private async handleAgentEdit( + request: Request, + slug: string, + ): Promise { + const body = await request.json().catch(() => ({})) as Record; + const agentId = getAgentId(request); + + // Idempotency check + const idempKey = getIdempotencyKey(request, body); + if (idempKey) { + const storage = this.ensureDocStorage(slug); + const idemp = checkIdempotency(storage, slug, "edit", idempKey, false); + if (idemp instanceof Response) return idemp; + if (idemp.isReplay && idemp.cachedResponse) { + return Response.json(idemp.cachedResponse); + } + } + + const doc = this.ensureDoc(); + const markdownText = doc.getText("markdown"); + const currentMarkdown = markdownText.toString(); + + // Parse operations from body + const rawOps = Array.isArray(body.operations) ? body.operations : []; + const operations: AgentEditOperation[] = []; + for (const raw of rawOps) { + if (typeof raw !== "object" || raw === null) continue; + const r = raw as Record; + if (r.op === "append" && typeof r.content === "string") { + operations.push({ op: "append", section: typeof r.section === "string" ? 
r.section : "", content: r.content }); + } else if (r.op === "replace" && typeof r.search === "string" && typeof r.content === "string") { + operations.push({ op: "replace", search: r.search, content: r.content }); + } else if (r.op === "insert" && typeof r.after === "string" && typeof r.content === "string") { + operations.push({ op: "insert", after: r.after, content: r.content }); + } + } + + if (operations.length === 0) { + return Response.json( + { success: false, error: "No valid operations provided" }, + { status: 400 }, + ); + } + + // Apply operations to stripped markdown + const result = applyAgentEditOperations(currentMarkdown, operations, { by: agentId }); + + if (!result.ok) { + return Response.json( + { success: false, error: result.message, code: result.code, opIndex: result.opIndex }, + { status: 409 }, + ); + } + + // Apply to the ProseMirror fragment so the browser editor sees the change + let finalMarkdown: string; + try { + finalMarkdown = await applyMarkdownToYDoc(doc, result.markdown, "cf-agent-edit"); + } catch { + // Fallback: write to Y.Text only + doc.transact(() => { + const oldLen = markdownText.length; + if (oldLen > 0) markdownText.delete(0, oldLen); + markdownText.insert(0, result.markdown); + }, "cf-agent-edit"); + finalMarkdown = result.markdown; + } + + const response = { + success: true, + revision: this.updateCount, + markdown: stripProofSpanTags(finalMarkdown), + idempotencyKey: idempKey ?? null, + collab: { + status: "converged", + markdownStatus: "converged", + fragmentStatus: "converged", + canonicalStatus: "converged", + }, + }; + + if (idempKey) { + const storage = this.ensureDocStorage(slug); + storeIdempotencyResult(storage, slug, "edit", idempKey, response); + } + + return Response.json(response); + } + + /** + * Edit v2: block-level operations. + * Simplified DO-native implementation that works on markdown sections + * rather than ProseMirror blocks. 
Supports replace_block, insert_after, + * insert_before, delete_block, and find_replace_in_block. + */ + private async handleAgentEditV2( + request: Request, + slug: string, + ): Promise { + const body = await request.json().catch(() => ({})) as Record; + + // Idempotency check + const idempKey = getIdempotencyKey(request, body); + if (idempKey) { + const storage = this.ensureDocStorage(slug); + const idemp = checkIdempotency(storage, slug, "edit/v2", idempKey, false); + if (idemp instanceof Response) return idemp; + if (idemp.isReplay && idemp.cachedResponse) { + return Response.json(idemp.cachedResponse); + } + } + + const doc = this.ensureDoc(); + const markdownText = doc.getText("markdown"); + const currentMarkdown = stripProofSpanTags(markdownText.toString()); + + // Parse the markdown into blocks (split by double newline or heading boundaries) + const blocks = splitMarkdownIntoBlocks(currentMarkdown); + + const rawOps = Array.isArray(body.operations) ? body.operations : []; + + // Apply operations sequentially + let modifiedBlocks = [...blocks]; + for (const rawOp of rawOps) { + if (typeof rawOp !== "object" || rawOp === null) continue; + const op = rawOp as Record; + const opType = typeof op.op === "string" ? op.op : ""; + const ref = typeof op.ref === "string" ? op.ref : ""; + const refIdx = parseBlockRef(ref); + + switch (opType) { + case "replace_block": { + if (refIdx === null || refIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid block ref: ${ref}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + const block = op.block as Record | undefined; + const newMarkdown = typeof block?.markdown === "string" ? 
block.markdown : ""; + modifiedBlocks[refIdx] = newMarkdown; + break; + } + case "insert_after": { + if (refIdx === null || refIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid block ref: ${ref}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + const newBlocks = Array.isArray(op.blocks) + ? (op.blocks as Array>).map(b => typeof b.markdown === "string" ? b.markdown : "") + : []; + modifiedBlocks.splice(refIdx + 1, 0, ...newBlocks); + break; + } + case "insert_before": { + if (refIdx === null || refIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid block ref: ${ref}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + const newBlocks = Array.isArray(op.blocks) + ? (op.blocks as Array>).map(b => typeof b.markdown === "string" ? b.markdown : "") + : []; + modifiedBlocks.splice(refIdx, 0, ...newBlocks); + break; + } + case "delete_block": { + if (refIdx === null || refIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid block ref: ${ref}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + modifiedBlocks.splice(refIdx, 1); + break; + } + case "find_replace_in_block": { + if (refIdx === null || refIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid block ref: ${ref}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + const find = typeof op.find === "string" ? op.find : ""; + const replace = typeof op.replace === "string" ? op.replace : ""; + const occurrence = op.occurrence === "all" ? "all" : "first"; + if (occurrence === "all") { + modifiedBlocks[refIdx] = modifiedBlocks[refIdx].replaceAll(find, replace); + } else { + modifiedBlocks[refIdx] = modifiedBlocks[refIdx].replace(find, replace); + } + break; + } + case "replace_range": { + const fromRef = typeof op.fromRef === "string" ? op.fromRef : ""; + const toRef = typeof op.toRef === "string" ? 
op.toRef : ""; + const fromIdx = parseBlockRef(fromRef); + const toIdx = parseBlockRef(toRef); + if (fromIdx === null || toIdx === null || fromIdx > toIdx || toIdx >= modifiedBlocks.length) { + return Response.json( + { success: false, error: `Invalid range refs: ${fromRef}-${toRef}`, code: "INVALID_REF" }, + { status: 409 }, + ); + } + const newBlocks = Array.isArray(op.blocks) + ? (op.blocks as Array>).map(b => typeof b.markdown === "string" ? b.markdown : "") + : []; + modifiedBlocks.splice(fromIdx, toIdx - fromIdx + 1, ...newBlocks); + break; + } + default: + return Response.json( + { success: false, error: `Unknown operation: ${opType}` }, + { status: 400 }, + ); + } + } + + // Reassemble markdown + const newMarkdown = modifiedBlocks.join("\n\n") + "\n"; + + // Apply to ProseMirror fragment so the browser editor sees the change + let finalMarkdown: string; + try { + finalMarkdown = await applyMarkdownToYDoc(doc, newMarkdown, "cf-agent-edit-v2"); + } catch { + doc.transact(() => { + const oldLen = markdownText.length; + if (oldLen > 0) markdownText.delete(0, oldLen); + markdownText.insert(0, newMarkdown); + }, "cf-agent-edit-v2"); + finalMarkdown = newMarkdown; + } + + const response = { + success: true, + revision: this.updateCount, + markdown: newMarkdown, + idempotencyKey: idempKey ?? 
null, + collab: { + status: "converged", + markdownStatus: "converged", + fragmentStatus: "converged", + canonicalStatus: "converged", + }, + }; + + if (idempKey) { + const storage = this.ensureDocStorage(slug); + storeIdempotencyResult(storage, slug, "edit/v2", idempKey, response); + } + + return Response.json(response); + } + + // --------------------------------------------------------------------------- + // Marks CRUD handler + // --------------------------------------------------------------------------- + + private async handleMarksRoute( + request: Request, + slug: string, + markAction: string, + ): Promise { + const doc = this.ensureDoc(); + const body = await request.json().catch(() => ({})) as Record; + const agentId = getAgentId(request); + + // Idempotency check + const idempKey = getIdempotencyKey(request, body); + if (idempKey) { + const storage = this.ensureDocStorage(slug); + const idemp = checkIdempotency(storage, slug, `marks/${markAction}`, idempKey, false); + if (idemp instanceof Response) return idemp; + if (idemp.isReplay && idemp.cachedResponse) { + return Response.json(idemp.cachedResponse); + } + } + + let result: MarkOperationResult; + + switch (markAction) { + case "comment": + result = addComment(doc, { + by: typeof body.by === "string" ? body.by : agentId, + text: typeof body.text === "string" ? body.text : "", + quote: typeof body.quote === "string" ? body.quote : (body.selector as any)?.quote, + }); + break; + + case "suggest-replace": + result = addSuggestion(doc, "replace", { + by: typeof body.by === "string" ? body.by : agentId, + quote: typeof body.quote === "string" ? body.quote : (body.selector as any)?.quote ?? "", + content: typeof body.content === "string" ? body.content : undefined, + }); + break; + + case "suggest-insert": + result = addSuggestion(doc, "insert", { + by: typeof body.by === "string" ? body.by : agentId, + quote: typeof body.quote === "string" ? body.quote : (body.selector as any)?.quote ?? 
"", + content: typeof body.content === "string" ? body.content : undefined, + }); + break; + + case "suggest-delete": + result = addSuggestion(doc, "delete", { + by: typeof body.by === "string" ? body.by : agentId, + quote: typeof body.quote === "string" ? body.quote : (body.selector as any)?.quote ?? "", + }); + break; + + case "accept": + result = acceptSuggestion(doc, { + markId: typeof body.markId === "string" ? body.markId : typeof body.suggestionId === "string" ? body.suggestionId : "", + by: typeof body.by === "string" ? body.by : agentId, + }); + break; + + case "reject": + result = rejectSuggestion(doc, { + markId: typeof body.markId === "string" ? body.markId : typeof body.suggestionId === "string" ? body.suggestionId : "", + by: typeof body.by === "string" ? body.by : agentId, + }); + break; + + case "reply": + result = replyComment(doc, { + markId: typeof body.markId === "string" ? body.markId : typeof body.commentId === "string" ? body.commentId : "", + by: typeof body.by === "string" ? body.by : agentId, + text: typeof body.text === "string" ? body.text : "", + }); + break; + + case "resolve": + result = resolveComment(doc, { + markId: typeof body.markId === "string" ? body.markId : typeof body.commentId === "string" ? body.commentId : "", + by: typeof body.by === "string" ? body.by : agentId, + }); + break; + + case "unresolve": + result = unresolveComment(doc, { + markId: typeof body.markId === "string" ? body.markId : typeof body.commentId === "string" ? body.commentId : "", + by: typeof body.by === "string" ? body.by : agentId, + }); + break; + + default: + return Response.json( + { error: "Unknown marks action", action: markAction }, + { status: 404 }, + ); + } + + // Handle errors + if (!result.success) { + return Response.json( + { success: false, error: result.error, code: result.errorCode }, + { status: result.statusCode ?? 
400 }, + ); + } + + // Emit event for agent polling + if (result.eventType && result.eventData) { + const storage = this.ensureDocStorage(slug); + storage.addDocumentEvent(slug, result.eventType, result.eventData, agentId); + } + + const response = { + success: true, + markId: result.markId, + markdown: result.markdown, + marks: result.marks, + updatedAt: new Date().toISOString(), + }; + + // Store idempotency result + if (idempKey) { + const storage = this.ensureDocStorage(slug); + storeIdempotencyResult(storage, slug, `marks/${markAction}`, idempKey, response); + } + + return Response.json(response); + } + + // --------------------------------------------------------------------------- + // Hocuspocus-compatible WebSocket handling + // --------------------------------------------------------------------------- + + private handleWebSocketUpgrade(url: URL): Response { + const pair = new WebSocketPair(); + const [client, server] = [pair[0], pair[1]]; + + // Accept with hibernation API + this.ctx.acceptWebSocket(server); + + // Ensure doc is loaded before any messages arrive + this.ensureDoc(); + + // Derive room name from the URL path (/ws/:slug) or query param (?slug=...). + // HocuspocusProvider strips ?slug= from collabWsUrl so we use path-based routing. + const pathMatch = url.pathname.match(/\/ws\/([^/]+)/); + const slug = pathMatch + ? decodeURIComponent(pathMatch[1]) + : url.searchParams.get("slug") ?? "unknown"; + this.roomName = slug; + + // Don't send anything yet — Hocuspocus protocol requires the client to + // initiate with Auth + SyncStep1. The server responds to those messages. + + return new Response(null, { status: 101, webSocket: client }); + } + + /** Get the room name for Hocuspocus message framing. */ + private getRoomName(): string { + return this.roomName ?? 
"unknown"; + } + + /** + * Create an encoder pre-filled with the Hocuspocus message header: + * varString(documentName) + varUint(messageType) + */ + private createHocuspocusEncoder(messageType: number): encoding.Encoder { + const encoder = encoding.createEncoder(); + encoding.writeVarString(encoder, this.getRoomName()); + encoding.writeVarUint(encoder, messageType); + return encoder; + } + + async webSocketMessage( + ws: WebSocket, + message: string | ArrayBuffer, + ): Promise { + // Hocuspocus protocol uses binary messages + if (typeof message === "string") return; + + const doc = this.ensureDoc(); + const data = new Uint8Array(message); + const decoder = decoding.createDecoder(data); + + // Hocuspocus wire format: varString(documentName) + varUint(messageType) + payload + const documentName = decoding.readVarString(decoder); + const messageType = decoding.readVarUint(decoder); + + // Remember the room name from the first message if not yet set + if (!this.roomName) { + this.roomName = documentName; + } + + switch (messageType) { + case MessageType.Sync: + this.handleSyncMessage(ws, decoder, doc); + break; + case MessageType.Awareness: + this.handleAwarenessMessage(ws, decoder); + break; + case MessageType.Auth: + this.handleAuthMessage(ws, decoder); + break; + case MessageType.QueryAwareness: + this.handleQueryAwarenessMessage(ws); + break; + case MessageType.CLOSE: + ws.close(); + break; + } + } + + /** + * Handle Hocuspocus Auth message. + * Always responds with Authenticated + read-write scope. 
+ */ + private handleAuthMessage( + ws: WebSocket, + decoder: decoding.Decoder, + ): void { + // Read the auth sub-type and token (we accept any token) + const authType = decoding.readVarUint(decoder); + if (authType === AuthMessageType.Token) { + // Read and discard the token value + decoding.readVarString(decoder); + } + + // Respond with Authenticated + const encoder = this.createHocuspocusEncoder(MessageType.Auth); + encoding.writeVarUint(encoder, AuthMessageType.Authenticated); + encoding.writeVarString(encoder, "read-write"); + ws.send(encoding.toUint8Array(encoder)); + } + + private handleSyncMessage( + ws: WebSocket, + decoder: decoding.Decoder, + doc: Y.Doc, + ): void { + // Build a reply encoder with the Hocuspocus header + const encoder = this.createHocuspocusEncoder(MessageType.Sync); + + // readSyncMessage applies the message and writes a response if needed. + // Origin is set to the WebSocket so our doc.on('update') handler knows + // which client sent the update. + const syncMessageType = syncProtocol.readSyncMessage( + decoder, + encoder, + doc, + ws, + ); + + // If readSyncMessage wrote a response (e.g. step 2 reply to step 1), + // send it back to the requesting client + if (encoding.length(encoder) > encodedHeaderLength(this.getRoomName(), MessageType.Sync)) { + ws.send(encoding.toUint8Array(encoder)); + } + + // After responding to SyncStep1 with SyncStep2, send a plain Sync/SyncStep1. + // HocuspocusProvider answers that follow-up with SyncStep2, which lets the + // server send SyncStatus and clear the client's initial unsynced counter. 
+ if (syncMessageType === 0) { + ws.send(createSyncStep1Message(this.getRoomName(), doc)); + } + + // Send SyncStatus acknowledgment for step 2 and update messages + if (syncMessageType === 1 || syncMessageType === 2) { + const statusEncoder = this.createHocuspocusEncoder(MessageType.SyncStatus); + encoding.writeVarUint(statusEncoder, 1); // 1 = success + ws.send(encoding.toUint8Array(statusEncoder)); + } + } + + private handleAwarenessMessage( + ws: WebSocket, + decoder: decoding.Decoder, + ): void { + if (!this.awareness) return; + + const update = decoding.readVarUint8Array(decoder); + + // Track which awareness client IDs this socket controls before applying, + // by decoding the update's client IDs from the wire format. + const clientIds = decodeAwarenessClientIds(update); + if (clientIds.length > 0) { + const existing = this.socketAwarenessClients.get(ws) ?? new Set(); + for (const id of clientIds) existing.add(id); + this.socketAwarenessClients.set(ws, existing); + } + + awarenessProtocol.applyAwarenessUpdate(this.awareness, update, ws); + + // Broadcast awareness to all other connected clients (don't persist) + const sockets = this.getWebSockets(); + for (const socket of sockets) { + if (socket !== ws && socket.readyState === WebSocket.READY_STATE_OPEN) { + const encoder = this.createHocuspocusEncoder(MessageType.Awareness); + encoding.writeVarUint8Array(encoder, update); + socket.send(encoding.toUint8Array(encoder)); + } + } + } + + private handleQueryAwarenessMessage(ws: WebSocket): void { + if (!this.awareness) return; + + const states = this.awareness.getStates(); + if (states.size === 0) return; + + const encoder = this.createHocuspocusEncoder(MessageType.Awareness); + const clients = Array.from(states.keys()); + encoding.writeVarUint8Array( + encoder, + awarenessProtocol.encodeAwarenessUpdate(this.awareness, clients), + ); + ws.send(encoding.toUint8Array(encoder)); + } + + /** + * Persist an incremental Y.Doc update to SQLite. 
  /**
   * Persist an incremental Y.Doc update to SQLite.
   * Triggers compaction when the update count exceeds the threshold.
   */
  private persistUpdate(update: Uint8Array): void {
    const sql = this.ctx.storage.sql;
    sql.exec(
      "INSERT INTO yjs_updates (update_data) VALUES (?)",
      update as unknown as ArrayBuffer,
    );
    this.updateCount++;
    if (this.roomName) {
      // Fire-and-forget D1 touch; failures here must not block the write path.
      void this.touchDocumentMetadata(this.roomName);
    }

    if (this.updateCount >= COMPACTION_THRESHOLD) {
      this.compact();
    }
  }

  /** Bump the catalog row's updated_at so listings reflect recent activity. */
  private async touchDocumentMetadata(slug: string): Promise<void> {
    await this.env.CATALOG_DB.prepare(
      "UPDATE documents SET updated_at = datetime('now') WHERE slug = ?",
    )
      .bind(slug)
      .run();
  }

  /**
   * Broadcast an update to all connected WebSocket clients except the origin.
   * Uses Hocuspocus framing: varString(documentName) + varUint(Sync) + update.
   */
  private broadcastUpdate(update: Uint8Array, origin: WebSocket): void {
    const encoder = this.createHocuspocusEncoder(MessageType.Sync);
    syncProtocol.writeUpdate(encoder, update);
    const message = encoding.toUint8Array(encoder);

    const sockets = this.getWebSockets();
    for (const socket of sockets) {
      if (socket !== origin && socket.readyState === WebSocket.READY_STATE_OPEN) {
        socket.send(message);
      }
    }
  }

  /**
   * Compact all incremental updates into a single snapshot.
   * This reduces SQLite row count and speeds up future doc loads.
   */
  private compact(): void {
    if (!this.doc) return;

    const sql = this.ctx.storage.sql;
    const snapshot = Y.encodeStateAsUpdate(this.doc);

    // Replace snapshot and clear incremental updates in one transaction
    sql.exec(
      `INSERT OR REPLACE INTO document_state (key, value, updated_at)
       VALUES ('yjs_snapshot', ?, datetime('now'))`,
      snapshot as unknown as ArrayBuffer,
    );
    sql.exec("DELETE FROM yjs_updates");
    this.updateCount = 0;
  }

  /**
   * Get all hibernated WebSocket connections managed by this DO.
   */
  private getWebSockets(): WebSocket[] {
    return this.ctx.getWebSockets();
  }

  /**
   * GET-only plain content endpoint: returns the markdown projection, with a
   * raw Y.Text fallback when ProseMirror projection fails.
   */
  private async handleContentRequest(request: Request): Promise<Response> {
    if (request.method === "GET") {
      const doc = this.ensureDoc();
      let content: string | null = null;
      try {
        const projection = await deriveMarkdownFromYDoc(doc);
        content = projection.markdown;
      } catch {
        // Fall back to raw Y.Text if ProseMirror projection fails
        content = doc.getText("markdown").toString() || null;
      }

      return Response.json({ content });
    }

    return new Response("Method Not Allowed", { status: 405 });
  }

  /**
   * Returns the current Y.Doc state as a base64-encoded binary snapshot.
   */
  private handleSnapshotRequest(): Response {
    const doc = this.ensureDoc();
    const stateUpdate = Y.encodeStateAsUpdate(doc);

    // Convert to base64 for JSON transport
    const base64 = uint8ArrayToBase64(stateUpdate);

    return new Response(
      JSON.stringify({
        snapshot: base64,
        clientCount: this.getWebSockets().length,
        updateCount: this.updateCount,
      }),
      {
        headers: { "content-type": "application/json" },
      },
    );
  }

  /**
   * Hibernation-API close hook: tear down the socket's awareness states and
   * schedule a cleanup alarm once the last client leaves.
   */
  async webSocketClose(
    ws: WebSocket,
    _code: number,
    _reason: string,
    _wasClean: boolean,
  ): Promise<void> {
    // Remove awareness states for the disconnecting client's tracked IDs
    if (this.awareness) {
      const clientIds = this.socketAwarenessClients.get(ws);
      if (clientIds && clientIds.size > 0) {
        awarenessProtocol.removeAwarenessStates(
          this.awareness,
          Array.from(clientIds),
          "websocket close",
        );
      }
      this.socketAwarenessClients.delete(ws);
    }

    ws.close();

    // If no clients remain, schedule cleanup alarm
    if (this.getWebSockets().length === 0) {
      await this.ctx.storage.setAlarm(Date.now() + CLEANUP_DELAY_MS);
    }
  }

  /** Hibernation-API error hook: close the socket and schedule cleanup. */
  async webSocketError(ws: WebSocket, _error: unknown): Promise<void> {
    ws.close();

    if (this.getWebSockets().length === 0) {
      await this.ctx.storage.setAlarm(Date.now() + CLEANUP_DELAY_MS);
    }
  }
handler for periodic maintenance. + * When all clients have disconnected, compact and release the Y.Doc from memory. + */ + async alarm(): Promise { + // If clients reconnected before the alarm fired, skip cleanup + if (this.getWebSockets().length > 0) return; + + // Compact before releasing memory + if (this.doc) { + this.compact(); + } + + // Release Y.Doc and awareness from memory — they'll be rehydrated on + // next connection + if (this.awareness) { + this.awareness.destroy(); + this.awareness = null; + } + if (this.doc) { + this.doc.destroy(); + this.doc = null; + } + this.roomName = null; + } +} + +/** + * Calculate the encoded byte length of a Hocuspocus message header + * (varString(documentName) + varUint(messageType)). + * Used to detect whether readSyncMessage wrote any reply payload. + */ +function encodedHeaderLength(documentName: string, messageType: number): number { + const encoder = encoding.createEncoder(); + encoding.writeVarString(encoder, documentName); + encoding.writeVarUint(encoder, messageType); + return encoding.length(encoder); +} + +function createSyncStep1Message(documentName: string, doc: Y.Doc): Uint8Array { + const encoder = encoding.createEncoder(); + encoding.writeVarString(encoder, documentName); + encoding.writeVarUint(encoder, MessageType.Sync); + syncProtocol.writeSyncStep1(encoder, doc); + return encoding.toUint8Array(encoder); +} + +/** + * Decode awareness client IDs from a y-protocols awareness update. + * Wire format: varUint(count), then for each entry: varUint(clientID) + varUint(clock) + varString(state). 
+ */ +function decodeAwarenessClientIds(update: Uint8Array): number[] { + const decoder = decoding.createDecoder(update); + const count = decoding.readVarUint(decoder); + const ids: number[] = []; + for (let i = 0; i < count; i++) { + ids.push(decoding.readVarUint(decoder)); + decoding.readVarUint(decoder); // clock + decoding.readVarString(decoder); // state JSON + } + return ids; +} + +/** + * Encode a Uint8Array as a base64 string. + * Uses btoa which is available in the Workers runtime. + */ +function uint8ArrayToBase64(bytes: Uint8Array): string { + let binary = ""; + for (let i = 0; i < bytes.length; i++) { + binary += String.fromCharCode(bytes[i]); + } + return btoa(binary); +} + +/** + * Split markdown into logical blocks (headings, paragraphs, lists, code blocks). + * Each block is the raw markdown text. Blocks are separated by blank lines. + */ +function splitMarkdownIntoBlocks(markdown: string): string[] { + if (!markdown.trim()) return []; + + const lines = markdown.split("\n"); + const blocks: string[] = []; + let current: string[] = []; + let inCodeBlock = false; + + for (const line of lines) { + const trimmed = line.trimStart(); + + // Track code fences + if (trimmed.startsWith("```") || trimmed.startsWith("~~~")) { + inCodeBlock = !inCodeBlock; + current.push(line); + continue; + } + + if (inCodeBlock) { + current.push(line); + continue; + } + + // Blank line separates blocks + if (line.trim() === "") { + if (current.length > 0) { + blocks.push(current.join("\n")); + current = []; + } + continue; + } + + // Heading starts a new block + if (/^#{1,6}\s/.test(trimmed) && current.length > 0) { + blocks.push(current.join("\n")); + current = [line]; + continue; + } + + current.push(line); + } + + if (current.length > 0) { + blocks.push(current.join("\n")); + } + + return blocks; +} + +/** + * Parse a block ref like "b1", "b2" into a 0-based index. + * Returns null if invalid. 
+ */ +function parseBlockRef(ref: string): number | null { + const match = ref.match(/^b(\d+)$/); + if (!match) return null; + const idx = Number.parseInt(match[1], 10) - 1; // b1 → index 0 + return idx >= 0 ? idx : null; +} + +/** + * Determine the minimum required role for an agent route. + */ +function getRequiredRole(subPath: string, method: string): "viewer" | "commenter" | "editor" | "owner_bot" { + // Read-only routes + if (method === "GET") return "viewer"; + + // Comment/suggestion creation + if (subPath === "marks/comment" || subPath.startsWith("marks/suggest")) return "commenter"; + if (subPath === "marks/reply") return "commenter"; + + // Resolve/unresolve — viewer can resolve own + if (subPath === "marks/resolve" || subPath === "marks/unresolve") return "viewer"; + + // Accept/reject, edit, rewrite, ops + if (subPath === "marks/accept" || subPath === "marks/reject") return "editor"; + if (subPath === "edit" || subPath === "edit/v2") return "editor"; + if (subPath === "rewrite") return "editor"; + if (subPath === "ops") return "editor"; + if (subPath === "presence" || subPath === "presence/disconnect") return "editor"; + + // Admin operations + if (subPath === "repair" || subPath === "clone-from-canonical") return "owner_bot"; + + // Events + if (subPath === "events/ack") return "editor"; + + return "viewer"; +} diff --git a/apps/proof-cloudflare/src/event-types.ts b/apps/proof-cloudflare/src/event-types.ts new file mode 100644 index 0000000..8d5bf83 --- /dev/null +++ b/apps/proof-cloudflare/src/event-types.ts @@ -0,0 +1,49 @@ +/** + * Typed event registry for document and agent events. + * + * Replaces freeform event type strings with a single const object and + * derived union type so that typos are caught at compile time. 
+ */ + +export const DocumentEventType = { + // Document lifecycle + 'document.created': 'document.created', + 'document.updated': 'document.updated', + 'document.rewritten': 'document.rewritten', + 'document.deleted': 'document.deleted', + 'document.paused': 'document.paused', + 'document.resumed': 'document.resumed', + 'document.revoked': 'document.revoked', + 'document.title.updated': 'document.title.updated', + 'document.edited': 'document.edited', + + // Agent activity + 'agent.connected': 'agent.connected', + 'agent.presence': 'agent.presence', + 'agent.disconnected': 'agent.disconnected', + 'agent.edit': 'agent.edit', + 'agent.edit.v2': 'agent.edit.v2', + + // Comments + 'comment.added': 'comment.added', + 'comment.replied': 'comment.replied', + 'comment.resolved': 'comment.resolved', + 'comment.unresolved': 'comment.unresolved', + + // Suggestions — add/accept/reject + 'suggestion.insert.added': 'suggestion.insert.added', + 'suggestion.delete.added': 'suggestion.delete.added', + 'suggestion.replace.added': 'suggestion.replace.added', + 'suggestion.accepted': 'suggestion.accepted', + 'suggestion.rejected': 'suggestion.rejected', + + // Mention + 'mention': 'mention', +} as const; + +export type DocumentEventType = (typeof DocumentEventType)[keyof typeof DocumentEventType]; + +/** Runtime type guard for DocumentEventType values. */ +export function isDocumentEventType(value: unknown): value is DocumentEventType { + return typeof value === 'string' && value in DocumentEventType; +} diff --git a/apps/proof-cloudflare/src/idempotency.ts b/apps/proof-cloudflare/src/idempotency.ts new file mode 100644 index 0000000..5e6fa12 --- /dev/null +++ b/apps/proof-cloudflare/src/idempotency.ts @@ -0,0 +1,84 @@ +/** + * Idempotency middleware for agent mutation routes. + * + * All POST mutations require an idempotency-key header. If the key has been + * seen before for the same document + route, the cached response is returned + * without re-executing the mutation. 
+ */ + +import type { DocumentStorage, StorageIdempotencyRecord } from './storage-interface.js'; + +export interface IdempotencyResult { + /** True if this is a replay of a previous request. */ + isReplay: boolean; + /** The cached response (only set when isReplay is true). */ + cachedResponse?: Record; + /** The idempotency key extracted from the request. */ + key: string; +} + +/** + * Extract the idempotency key from request headers or body. + * Returns null if not present. + */ +export function getIdempotencyKey(request: Request, body?: Record): string | null { + // Check header first (used by edit v1/v2) + const headerKey = request.headers.get('idempotency-key'); + if (headerKey) return headerKey; + + // Check body (used by marks operations) + if (body && typeof body.idempotencyKey === 'string') { + return body.idempotencyKey; + } + + return null; +} + +/** + * Check idempotency for a mutation request. + * Returns a replay result if the key has been seen, or a fresh result + * with the key for the caller to store after executing the mutation. + */ +export function checkIdempotency( + storage: DocumentStorage, + slug: string, + route: string, + key: string | null, + required: boolean, +): IdempotencyResult | Response { + if (!key) { + if (required) { + return Response.json( + { error: 'idempotency-key required for mutation' }, + { status: 400 }, + ); + } + // Not required and not provided — proceed without idempotency + return { isReplay: false, key: '' }; + } + + const cached = storage.getStoredIdempotencyRecord(slug, route, key); + if (cached) { + return { + isReplay: true, + cachedResponse: cached.response, + key, + }; + } + + return { isReplay: false, key }; +} + +/** + * Store the result of a mutation for future idempotency replays. 
+ */ +export function storeIdempotencyResult( + storage: DocumentStorage, + slug: string, + route: string, + key: string, + response: Record, +): void { + if (!key) return; + storage.storeIdempotencyResult(slug, route, key, response); +} diff --git a/apps/proof-cloudflare/src/index.ts b/apps/proof-cloudflare/src/index.ts new file mode 100644 index 0000000..f2a38c3 --- /dev/null +++ b/apps/proof-cloudflare/src/index.ts @@ -0,0 +1,270 @@ +/** + * Cloudflare Workers entry point for Proof SDK. + * + * Routes incoming requests to static assets, the D1 document catalog, + * or per-document Durable Objects. Each document gets its own DO instance + * for state isolation and collab. + */ + +import { DocumentSession } from "./document-session.js"; + +export { DocumentSession }; + +/** Cloudflare bindings: Durable Objects, D1, and static assets. */ +export interface Env { + DOCUMENT_SESSION: DurableObjectNamespace; + CATALOG_DB: D1Database; + ASSETS: Fetcher; +} + +const SLUG_LENGTH = 8; +const SLUG_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789"; + +function generateSlug(): string { + const bytes = crypto.getRandomValues(new Uint8Array(SLUG_LENGTH)); + let slug = ""; + for (let i = 0; i < SLUG_LENGTH; i++) { + slug += SLUG_CHARS[bytes[i] % SLUG_CHARS.length]; + } + return slug; +} + +export default { + async fetch( + request: Request, + env: Env, + _ctx: ExecutionContext, + ): Promise { + const url = new URL(request.url); + const path = url.pathname; + + // Health check + if (path === "/health") { + return new Response("OK", { status: 200 }); + } + + // Agent discovery + if (path === "/.well-known/agent.json") { + const base = url.origin; + return Response.json({ + name: "Proof Editor", + description: "Agent-native markdown editor with collaborative sharing and provenance tracking", + api_base: `${base}/api`, + capabilities: ["create_document", "share", "comment", "suggest", "rewrite", "collab", "provenance"], + auth: { + methods: ["api_key", "none"], + api_key_header: 
"Authorization: Bearer ", + no_auth_allowed: true, + shared_link: { + token_from_url: "?token=", + preferred_header: "x-share-token", + alt_header: "x-bridge-token", + }, + }, + quickstart: { + received_link: { + description: "Given a Proof share URL, read it in one step", + method: "GET", + url: "/api/agent/{slug}/state", + headers: { "x-share-token": "{token}" }, + returns: "markdown + marks + _links", + }, + create_and_share: { + method: "POST", + url: "/documents", + body: { title: "My Document" }, + returns: "slug + url", + }, + }, + }, { + headers: { "cache-control": "public, max-age=300" }, + }); + } + + // Root → create a new document and redirect to /d/:slug + if (path === "/") { + const slug = generateSlug(); + const doId = env.DOCUMENT_SESSION.idFromName(slug).toString(); + await env.CATALOG_DB.prepare( + "INSERT INTO documents (id, slug, title, do_id) VALUES (?, ?, ?, ?)", + ) + .bind(doId, slug, "", doId) + .run(); + return Response.redirect(new URL(`/d/${slug}?new=1`, url.origin).toString(), 302); + } + + // Document editor — serve the SPA for /d/:slug + const docPageMatch = path.match(/^\/d\/([^/]+)\/?$/); + if (docPageMatch) { + const slug = decodeURIComponent(docPageMatch[1]); + // Fetch index.html via the static assets binding, then rewrite + // relative paths to absolute so ./assets/editor.js doesn't resolve + // to /d/assets/editor.js + const assetResponse = await env.ASSETS.fetch( + new Request(new URL("/index.html", url.origin)), + ); + let html = await assetResponse.text(); + html = html.replaceAll('"./', '"/').replaceAll("'./", "'/"); + + // When arriving from doc creation (?new=1), suppress the "shared with + // you" welcome toast and name prompt — the user is the creator, not a + // share recipient. 
+ const isNewDoc = url.searchParams.get("new") === "1"; + if (isNewDoc) { + const suppressScript = ``; + html = html.replace("", `${suppressScript}`); + } + + return new Response(html, { + headers: { "content-type": "text/html; charset=utf-8" }, + }); + } + + // POST /documents — API document creation + if (path === "/documents" && request.method === "POST") { + const contentType = request.headers.get("content-type") ?? ""; + let markdown = ""; + let title = ""; + + if (contentType.includes("application/json")) { + const body = await request.json().catch(() => ({})) as Record; + markdown = typeof body.markdown === "string" ? body.markdown : ""; + title = typeof body.title === "string" ? body.title : ""; + } else if (contentType.includes("text/")) { + markdown = await request.text(); + } + + if (!title) { + const headingMatch = markdown.match(/^#\s+(.+)$/m); + title = headingMatch ? headingMatch[1].trim() : ""; + } + + const slug = generateSlug(); + const doId = env.DOCUMENT_SESSION.idFromName(slug).toString(); + await env.CATALOG_DB.prepare( + "INSERT INTO documents (id, slug, title, do_id) VALUES (?, ?, ?, ?)", + ) + .bind(doId, slug, title, doId) + .run(); + + // Write markdown content to the DO if provided + if (markdown) { + const id = env.DOCUMENT_SESSION.idFromName(slug); + const stub = env.DOCUMENT_SESSION.get(id); + await stub.fetch(new Request( + new URL(`/api/agent/${slug}/rewrite`, url.origin), + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ content: markdown, force: true }), + }, + )); + } + + const shareUrl = `${url.origin}/d/${slug}`; + return Response.json( + { success: true, slug, url: `/d/${slug}`, shareUrl, title }, + { status: 201 }, + ); + } + + // POST /share/markdown or /api/share/markdown — create doc from raw markdown + if ((path === "/share/markdown" || path === "/api/share/markdown") && request.method === "POST") { + const contentType = request.headers.get("content-type") ?? 
""; + let markdown = ""; + let title = ""; + + if (contentType.includes("application/json")) { + const body = await request.json().catch(() => ({})) as Record; + markdown = typeof body.markdown === "string" ? body.markdown : ""; + title = typeof body.title === "string" ? body.title : ""; + } else { + // text/plain or text/markdown — body IS the markdown + markdown = await request.text(); + } + + if (!title) { + // Extract title from first heading + const headingMatch = markdown.match(/^#\s+(.+)$/m); + title = headingMatch ? headingMatch[1].trim() : "Untitled"; + } + + const slug = generateSlug(); + const doId = env.DOCUMENT_SESSION.idFromName(slug).toString(); + await env.CATALOG_DB.prepare( + "INSERT INTO documents (id, slug, title, do_id) VALUES (?, ?, ?, ?)", + ) + .bind(doId, slug, title, doId) + .run(); + + // Write the markdown content to the DO + if (markdown) { + const id = env.DOCUMENT_SESSION.idFromName(slug); + const stub = env.DOCUMENT_SESSION.get(id); + await stub.fetch(new Request( + new URL(`/api/agent/${slug}/rewrite`, url.origin), + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ content: markdown, force: true }), + }, + )); + } + + const shareUrl = `${url.origin}/d/${slug}`; + return Response.json( + { success: true, slug, url: `/d/${slug}`, shareUrl, title }, + { status: 201 }, + ); + } + + if (path === "/api/metrics/collab-reconnect" && request.method === "POST") { + return new Response(null, { status: 204 }); + } + + // Agent bridge routes — delegate to DO by slug + // Matches /api/agent/:slug/* (state, edit, marks, events, etc.) + const agentMatch = path.match(/^\/api\/agent\/([^/]+)(\/.*)?$/); + if (agentMatch) { + const slug = agentMatch[1]; + return routeToDocumentSession(request, env, slug); + } + + // Document API routes — delegate to Durable Object by slug + // Matches both /documents/:slug/... and /api/documents/:slug/... 
+ const documentMatch = path.match(/^(?:\/api)?\/documents\/([^/]+)(\/.*)?$/); + if (documentMatch) { + const slug = documentMatch[1]; + return routeToDocumentSession(request, env, slug); + } + + // WebSocket upgrade — route to DO + // Supports both /ws/:slug (path-based, used by HocuspocusProvider) and + // /ws?slug=... (query-based, legacy) + const wsMatch = path.match(/^\/ws\/([^/]+)\/?$/); + if (wsMatch) { + return routeToDocumentSession(request, env, decodeURIComponent(wsMatch[1])); + } + if (path === "/ws") { + const slug = url.searchParams.get("slug"); + if (!slug) { + return new Response("Missing slug parameter", { status: 400 }); + } + return routeToDocumentSession(request, env, slug); + } + + return new Response("Not Found", { status: 404 }); + }, +} satisfies ExportedHandler; + +/** Forward a request to the Durable Object instance for the given document slug. */ +async function routeToDocumentSession( + request: Request, + env: Env, + slug: string, +): Promise { + const id = env.DOCUMENT_SESSION.idFromName(slug); + const stub = env.DOCUMENT_SESSION.get(id); + return stub.fetch(request); +} + diff --git a/apps/proof-cloudflare/src/mention-detection.ts b/apps/proof-cloudflare/src/mention-detection.ts new file mode 100644 index 0000000..3b733cf --- /dev/null +++ b/apps/proof-cloudflare/src/mention-detection.ts @@ -0,0 +1,39 @@ +/** + * Mention detection for comment and reply text. + * + * Detects @-mentions in plain text and returns structured match data. + * Uses the same `@proof` pattern from `src/agent/trigger-service.ts` for + * consistency, but also captures arbitrary `@username` mentions. + */ + +export interface MentionMatch { + /** The target username/identifier (without the leading `@`). */ + target: string; + /** The full matched text including the `@` prefix. */ + text: string; + /** Character index of the `@` in the source string. 
*/ + index: number; +} + +const MENTION_PATTERN = /@(\w+)\b/g; + +/** + * Scan `text` for @-mentions and return all matches. + * + * The regex is intentionally simple (`/@(\w+)\b/g`) — it matches + * `@proof`, `@alice`, `@agent_1`, etc. + */ +export function detectMentions(text: string): MentionMatch[] { + const matches: MentionMatch[] = []; + const pattern = new RegExp(MENTION_PATTERN.source, MENTION_PATTERN.flags); + for (const match of text.matchAll(pattern)) { + if (typeof match.index === 'number' && match[1]) { + matches.push({ + target: match[1], + text: match[0], + index: match.index, + }); + } + } + return matches; +} diff --git a/apps/proof-cloudflare/src/milkdown-headless.ts b/apps/proof-cloudflare/src/milkdown-headless.ts new file mode 100644 index 0000000..bde8d64 --- /dev/null +++ b/apps/proof-cloudflare/src/milkdown-headless.ts @@ -0,0 +1,198 @@ +/** + * Headless Milkdown engine for Cloudflare Workers. + * + * Provides ProseMirror schema + markdown parser/serializer without a DOM. + * Ported from server/milkdown-headless.ts with import paths adjusted for + * the cloudflare app location within the monorepo. 
+ */ + +import { Editor, editorViewCtx, marksCtx, nodesCtx, remarkStringifyOptionsCtx } from '@milkdown/core'; +import { schema as commonmarkSchema } from '@milkdown/preset-commonmark'; +import { schema as gfmSchema } from '@milkdown/preset-gfm'; +import { Schema, type Node as ProseMirrorNode } from '@milkdown/prose/model'; +import { ParserState, SerializerState } from '@milkdown/transformer'; +import remarkFrontmatter from 'remark-frontmatter'; +import remarkGfm from 'remark-gfm'; +import remarkParse from 'remark-parse'; +import remarkStringify from 'remark-stringify'; +import { unified } from 'unified'; + +// Schema plugins — same as browser editor so markdown round-trips correctly +import { codeBlockExtPlugins } from '../../../src/editor/schema/code-block-ext.js'; +import { frontmatterSchema } from '../../../src/editor/schema/frontmatter.js'; +import { proofMarkPlugins } from '../../../src/editor/schema/proof-marks.js'; +import { remarkProofMarks, proofMarkHandler } from '../../../src/formats/remark-proof-marks.js'; + +export type HeadlessMilkdownParser = { + schema: Schema; + parseMarkdown: (markdown: string) => ProseMirrorNode; +}; + +type HeadlessMilkdown = HeadlessMilkdownParser & { + serializeMarkdown: (doc: ProseMirrorNode) => string; + serializeSingleNode: (node: ProseMirrorNode) => string; +}; + +let enginePromise: Promise | null = null; + +const INLINE_HTML_TAG_PATTERN = /<\/?[A-Za-z][A-Za-z0-9-]*(?:\s+[^>\n]*)?\s*\/?>/g; +const STANDALONE_HTML_LINE_PATTERN = /^[ \t]*(?:|<\/?[A-Za-z][A-Za-z0-9-]*(?:\s+[^>\n]*)?\s*\/?>)[ \t]*$/gm; + +export function stripStandaloneHtmlLines(markdown: string): string { + return markdown + .replace(STANDALONE_HTML_LINE_PATTERN, '') + .replace(/\n{3,}/g, '\n\n') + .trimEnd(); +} + +export function stripInlineHtmlTags(markdown: string): string { + return markdown + .replace(//g, '') + .replace(INLINE_HTML_TAG_PATTERN, '') + .replace(/\n{3,}/g, '\n\n') + .trimEnd(); +} + +export type MarkdownParseFallbackMode = 'original' | 
'strip_html_lines' | 'strip_html_tags' | 'failed'; + +export type MarkdownParseWithFallbackResult = { + doc: ProseMirrorNode | null; + mode: MarkdownParseFallbackMode; + error: unknown; +}; + +export function parseMarkdownWithHtmlFallback( + parser: HeadlessMilkdownParser, + markdown: string, +): MarkdownParseWithFallbackResult { + const input = markdown ?? ''; + const candidates: Array<{ mode: Exclude; value: string }> = []; + candidates.push({ mode: 'original', value: input }); + + const withoutHtmlLines = stripStandaloneHtmlLines(input); + if (withoutHtmlLines !== input) { + candidates.push({ mode: 'strip_html_lines', value: withoutHtmlLines }); + } + + const withoutHtmlTags = stripInlineHtmlTags(withoutHtmlLines); + if (withoutHtmlTags !== withoutHtmlLines) { + candidates.push({ mode: 'strip_html_tags', value: withoutHtmlTags }); + } + + let lastError: unknown = null; + for (const candidate of candidates) { + try { + return { + doc: parser.parseMarkdown(candidate.value), + mode: candidate.mode, + error: null, + }; + } catch (error) { + lastError = error; + } + } + + return { + doc: null, + mode: 'failed', + error: lastError, + }; +} + +function createSerializer(schema: Schema): (doc: ProseMirrorNode) => string { + const processor = unified() + .use(remarkGfm) + .use(remarkFrontmatter, ['yaml']) + .use(remarkStringify, { + handlers: { + proofMark: proofMarkHandler, + }, + }); + + return SerializerState.create(schema as any, processor as any) as unknown as (doc: ProseMirrorNode) => string; +} + +async function buildHeadless(): Promise { + const editor = Editor.make(); + const ctx = editor.ctx; + + ctx.inject(nodesCtx, []); + ctx.inject(marksCtx, []); + ctx.inject(remarkStringifyOptionsCtx, { handlers: {}, encode: [] } as any); + + let currentDoc: ProseMirrorNode | null = null; + const editorViewStub = { + state: { + get doc() { + return currentDoc; + }, + }, + } as any; + ctx.inject(editorViewCtx, editorViewStub); + + const plugins = [ + ...commonmarkSchema, + 
...gfmSchema, + ...frontmatterSchema, + ...codeBlockExtPlugins, + ...proofMarkPlugins, + ].flat(); + + for (const plugin of plugins) { + const runner = plugin(ctx); + if (typeof runner === 'function') { + await runner(); + } + } + + const nodes = Object.fromEntries(ctx.get(nodesCtx) as any); + const marks = Object.fromEntries(ctx.get(marksCtx) as any); + const schema = new Schema({ nodes, marks }); + + const processor = unified() + .use(remarkParse) + .use(remarkFrontmatter, ['yaml']) + .use(remarkGfm) + .use(remarkProofMarks); + + const parseMarkdown = ParserState.create(schema as any, processor as any) as unknown as (markdown: string) => ProseMirrorNode; + const serializer = createSerializer(schema); + const serializeMarkdown = (doc: ProseMirrorNode): string => { + currentDoc = doc; + return serializer(doc); + }; + const serializeSingleNode = (node: ProseMirrorNode): string => { + if (node.type.name === schema.topNodeType.name) { + return serializeMarkdown(node); + } + const wrapper = schema.topNodeType.create(null, [node]); + return serializeMarkdown(wrapper); + }; + + return { schema, parseMarkdown, serializeMarkdown, serializeSingleNode }; +} + +async function getHeadlessMilkdown(): Promise { + if (!enginePromise) { + enginePromise = buildHeadless().catch((error) => { + enginePromise = null; + throw error; + }); + } + return enginePromise; +} + +export async function getHeadlessMilkdownParser(): Promise { + const engine = await getHeadlessMilkdown(); + return { schema: engine.schema, parseMarkdown: engine.parseMarkdown }; +} + +export async function serializeMarkdown(doc: ProseMirrorNode): Promise { + const engine = await getHeadlessMilkdown(); + return engine.serializeMarkdown(doc); +} + +export async function serializeSingleNode(node: ProseMirrorNode): Promise { + const engine = await getHeadlessMilkdown(); + return engine.serializeSingleNode(node); +} diff --git a/apps/proof-cloudflare/src/proof-span-strip.ts b/apps/proof-cloudflare/src/proof-span-strip.ts 
new file mode 100644 index 0000000..8ed5e19 --- /dev/null +++ b/apps/proof-cloudflare/src/proof-span-strip.ts @@ -0,0 +1,459 @@ +const authoredProofAttrRegex = /data-proof\s*=\s*(?:"authored"|'authored'|authored)/i; +const anyProofAttrRegex = /data-proof\s*=\s*(?:"[^"]+"|'[^']+'|[^\s>]+)/i; + +type ProofReplacementMark = { + kind?: unknown; + quote?: unknown; +}; + +type AuthoredSpanBounds = { + openStart: number; + contentStart: number; + contentEnd: number; + closeEnd: number; +}; + +type ProofRange = { + id: string; + start: number; + end: number; +}; + +type StripStackEntry = { + isProof: boolean; + proofId: string | null; + contentStart: number; +}; + +function isAuthoredProofSpan(tag: string): boolean { + return authoredProofAttrRegex.test(tag); +} + +function isAnyProofSpan(tag: string): boolean { + return anyProofAttrRegex.test(tag); +} + +function extractProofSpanId(tag: string): string | null { + const match = tag.match(/data-id\s*=\s*(?:"([^"]+)"|'([^']+)'|([^\s>]+))/i); + const id = match?.[1] ?? match?.[2] ?? match?.[3] ?? null; + return typeof id === 'string' && id.trim().length > 0 ? id.trim() : null; +} + +function hasActiveSuppression(stack: Array<{ suppressContent: boolean }>): boolean { + return stack.some((entry) => entry.suppressContent); +} + +function collectStrippedProofData( + markdown: string, + shouldStrip: (tag: string) => boolean, +): { stripped: string; proofRanges: ProofRange[] } { + const spanTagRegex = /<\/?span\b[^>]*>/gi; + const stack: StripStackEntry[] = []; + const proofRanges: ProofRange[] = []; + let result = ''; + let lastIndex = 0; + + for (const match of markdown.matchAll(spanTagRegex)) { + const index = match.index ?? 
-1; + if (index < 0) continue; + const tag = match[0]; + result += markdown.slice(lastIndex, index); + lastIndex = index + tag.length; + + if (tag.startsWith('= entry.contentStart) { + proofRanges.push({ + id: entry.proofId, + start: entry.contentStart, + end: result.length, + }); + } + } else { + result += tag; + } + continue; + } + + const isProof = shouldStrip(tag); + if (isProof) { + stack.push({ + isProof: true, + proofId: extractProofSpanId(tag), + contentStart: result.length, + }); + continue; + } + + result += tag; + stack.push({ + isProof: false, + proofId: null, + contentStart: result.length, + }); + } + + result += markdown.slice(lastIndex); + return { stripped: result, proofRanges }; +} + +function mergeRanges(ranges: Array<{ start: number; end: number }>): Array<{ start: number; end: number }> { + if (ranges.length === 0) return []; + const sorted = ranges + .filter((range) => range.end > range.start) + .sort((a, b) => (a.start - b.start) || (a.end - b.end)); + if (sorted.length === 0) return []; + + const merged: Array<{ start: number; end: number }> = [{ ...sorted[0] }]; + for (let index = 1; index < sorted.length; index += 1) { + const current = sorted[index]; + const last = merged[merged.length - 1]; + if (current.start <= last.end) { + last.end = Math.max(last.end, current.end); + continue; + } + merged.push({ ...current }); + } + return merged; +} + +function isGapFullyCovered( + coverage: Array<{ start: number; end: number }>, + start: number, + end: number, +): boolean { + if (end <= start) return true; + let cursor = start; + for (const range of coverage) { + if (range.end <= cursor) continue; + if (range.start > cursor) return false; + cursor = Math.max(cursor, range.end); + if (cursor >= end) return true; + } + return false; +} + +function buildReplacementGroups( + proofRanges: ProofRange[], + replacementsById: Record, +): Array<{ start: number; end: number; replacement: string }> { + const replacementIds = Object.keys(replacementsById); + 
if (replacementIds.length === 0 || proofRanges.length === 0) return []; + + const rangesById = new Map(); + for (const range of proofRanges) { + if (!(range.id in replacementsById) || range.end <= range.start) continue; + const existing = rangesById.get(range.id); + if (existing) { + existing.push(range); + } else { + rangesById.set(range.id, [range]); + } + } + + const coverage = mergeRanges(proofRanges.map(({ start, end }) => ({ start, end }))); + const groups: Array<{ start: number; end: number; replacement: string }> = []; + + for (const [id, ranges] of rangesById.entries()) { + const replacement = replacementsById[id]; + if (typeof replacement !== 'string') continue; + const sorted = [...ranges].sort((a, b) => (a.start - b.start) || (a.end - b.end)); + let currentStart = sorted[0]?.start ?? -1; + let currentEnd = sorted[0]?.end ?? -1; + + for (let index = 1; index < sorted.length; index += 1) { + const next = sorted[index]; + if (next.start <= currentEnd || isGapFullyCovered(coverage, currentEnd, next.start)) { + currentEnd = Math.max(currentEnd, next.end); + continue; + } + if (currentEnd > currentStart) { + groups.push({ start: currentStart, end: currentEnd, replacement }); + } + currentStart = next.start; + currentEnd = next.end; + } + + if (currentEnd > currentStart) { + groups.push({ start: currentStart, end: currentEnd, replacement }); + } + } + + groups.sort((a, b) => (a.start - b.start) || (b.end - a.end)); + const selected: Array<{ start: number; end: number; replacement: string }> = []; + for (const group of groups) { + const previous = selected[selected.length - 1]; + if (previous && group.start >= previous.start && group.end <= previous.end) { + continue; + } + selected.push(group); + } + return selected; +} + +function applyReplacementGroups( + stripped: string, + groups: Array<{ start: number; end: number; replacement: string }>, +): string { + if (groups.length === 0) return stripped; + let result = ''; + let cursor = 0; + for (const group of 
groups) { + if (group.start < cursor) continue; + result += stripped.slice(cursor, group.start); + result += group.replacement; + cursor = group.end; + } + result += stripped.slice(cursor); + return result; +} + +function stripProofSpanTagsInternal( + markdown: string, + shouldStrip: (tag: string) => boolean, + replacementsById?: Record, +): string { + if (replacementsById) { + const { stripped, proofRanges } = collectStrippedProofData(markdown, shouldStrip); + return applyReplacementGroups(stripped, buildReplacementGroups(proofRanges, replacementsById)); + } + + const spanTagRegex = /<\/?span\b[^>]*>/gi; + const proofStack: Array<{ isProof: boolean; suppressContent: boolean }> = []; + let result = ''; + let lastIndex = 0; + + for (const match of markdown.matchAll(spanTagRegex)) { + const index = match.index ?? -1; + if (index < 0) continue; + const tag = match[0]; + + if (!hasActiveSuppression(proofStack)) { + result += markdown.slice(lastIndex, index); + } + lastIndex = index + tag.length; + + const isClosing = tag.startsWith('` HTML tags from markdown, + * leaving the inner text content intact. Non-Proof `` tags are preserved. + * + * Used by: + * - Agent snapshot endpoint (block markdown) + * - Agent edit operations (anchor/search matching) + * - Share text/markdown content negotiation + */ +export function stripProofSpanTags(markdown: string): string { + return stripProofSpanTagsInternal(markdown, isAuthoredProofSpan); +} + +/** + * Strip all Proof `` wrappers from markdown while preserving + * their inner text. Non-Proof spans remain intact. 
+ */ +export function stripAllProofSpanTags(markdown: string): string { + return stripProofSpanTagsInternal(markdown, isAnyProofSpan); +} + +export function stripAllProofSpanTagsWithReplacements( + markdown: string, + replacementsById: Record, +): string { + return stripProofSpanTagsInternal(markdown, isAnyProofSpan, replacementsById); +} + +export function buildProofSpanReplacementMap( + marks: Record, +): Record { + const replacements: Record = {}; + for (const [id, mark] of Object.entries(marks)) { + if (typeof mark?.quote !== 'string' || mark.quote.trim().length === 0) continue; + if ( + mark.kind === 'comment' + || mark.kind === 'insert' + || mark.kind === 'delete' + || mark.kind === 'replace' + || mark.kind === 'approved' + || mark.kind === 'flagged' + ) { + replacements[id] = mark.quote; + } + } + return replacements; +} + +/** + * Build a mapping from stripped-text indices back to original-text indices. + * Returns an array where strippedToOriginal[i] is the index in the original + * string corresponding to position i in the stripped string. + */ +export function buildStrippedIndexMap(markdown: string): { stripped: string; map: number[] } { + const spanTagRegex = /<\/?span\b[^>]*>/gi; + const proofStack: boolean[] = []; + const resultChars: string[] = []; + const indexMap: number[] = []; + let lastIndex = 0; + + for (const match of markdown.matchAll(spanTagRegex)) { + const matchIndex = match.index ?? -1; + if (matchIndex < 0) continue; + const tag = match[0]; + + // Copy characters between last tag and this tag + for (let i = lastIndex; i < matchIndex; i++) { + resultChars.push(markdown[i]); + indexMap.push(i); + } + lastIndex = matchIndex + tag.length; + + const isClosing = tag.startsWith(']*>/gi; + const stack: Array<{ authored: boolean; openStart: number; contentStart: number }> = []; + const spans: AuthoredSpanBounds[] = []; + + for (const match of markdown.matchAll(spanTagRegex)) { + const matchIndex = match.index ?? 
-1; + if (matchIndex < 0) continue; + const tag = match[0]; + + if (tag.startsWith(' bestCloseEnd) { + bestCloseEnd = span.closeEnd; + advanced = true; + } + } + + if (!advanced) return nextIndex; + nextIndex = bestCloseEnd; + } +} diff --git a/apps/proof-cloudflare/src/share-types.ts b/apps/proof-cloudflare/src/share-types.ts new file mode 100644 index 0000000..90e29b2 --- /dev/null +++ b/apps/proof-cloudflare/src/share-types.ts @@ -0,0 +1,26 @@ +export type ShareState = 'ACTIVE' | 'PAUSED' | 'REVOKED' | 'DELETED'; + +export type ShareRole = 'viewer' | 'commenter' | 'editor' | 'owner_bot'; + +export const ACTIVE_SHARE_STATES: ReadonlySet = new Set([ + 'ACTIVE', +]); + +export const MUTABLE_SHARE_STATES: ReadonlySet = new Set([ + 'ACTIVE', + 'PAUSED', +]); + +export function isShareState(value: unknown): value is ShareState { + return value === 'ACTIVE' + || value === 'PAUSED' + || value === 'REVOKED' + || value === 'DELETED'; +} + +export function isShareRole(value: unknown): value is ShareRole { + return value === 'viewer' + || value === 'commenter' + || value === 'editor' + || value === 'owner_bot'; +} diff --git a/apps/proof-cloudflare/src/storage-do.ts b/apps/proof-cloudflare/src/storage-do.ts new file mode 100644 index 0000000..f0728a2 --- /dev/null +++ b/apps/proof-cloudflare/src/storage-do.ts @@ -0,0 +1,513 @@ +/** + * Durable Object SQLite implementation of DocumentStorage. + * + * Each DO instance is scoped to a single document slug. The slug parameter on + * interface methods is validated but the actual queries always target the + * single-document tables owned by this DO. + * + * Tables are created lazily on first access. SQL syntax targets the standard + * SQLite dialect available in Cloudflare DO `ctx.storage.sql`. 
+ */ + +import type { + DocumentStorage, + StorageDocumentEventRow, + StorageDocumentRow, + StorageIdempotencyRecord, +} from './storage-interface.js'; +import type { DocumentEventType } from './event-types.js'; +import type { ShareRole, ShareState } from './share-types.js'; + +const DEFAULT_EVENT_PAGE_SIZE = 100; + +/** + * Options for constructing a DODocumentStorage instance. + */ +interface DODocumentStorageOptions { + /** The DurableObjectState providing `storage.sql`. */ + ctx: DurableObjectState; + /** The document slug this DO instance manages. */ + slug: string; +} + +export class DODocumentStorage implements DocumentStorage { + private readonly sql: SqlStorage; + private readonly slug: string; + // Tables are created in DocumentSession.initStorage() at DO construction time. + // Set to true by default since we can rely on them existing. + private tablesInitialized = true; + + constructor(options: DODocumentStorageOptions) { + this.sql = options.ctx.storage.sql; + this.slug = options.slug; + } + + // ------------------------------------------------------------------------- + // Schema initialization + // ------------------------------------------------------------------------- + + private ensureTables(): void { + if (this.tablesInitialized) return; + + this.sql.exec(` + CREATE TABLE IF NOT EXISTS documents ( + slug TEXT PRIMARY KEY, + doc_id TEXT, + title TEXT, + markdown TEXT NOT NULL DEFAULT '', + marks TEXT NOT NULL DEFAULT '{}', + revision INTEGER NOT NULL DEFAULT 0, + y_state_version INTEGER NOT NULL DEFAULT 0, + share_state TEXT NOT NULL DEFAULT 'ACTIVE', + access_epoch INTEGER NOT NULL DEFAULT 0, + active INTEGER NOT NULL DEFAULT 1, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + updated_at TEXT NOT NULL DEFAULT (datetime('now')), + deleted_at TEXT + ) + `); + + this.sql.exec(` + CREATE TABLE IF NOT EXISTS document_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + document_slug TEXT NOT NULL, + document_revision INTEGER, + event_type TEXT 
NOT NULL, + event_data TEXT NOT NULL DEFAULT '{}', + actor TEXT NOT NULL DEFAULT '', + idempotency_key TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + acked_by TEXT, + acked_at TEXT + ) + `); + + this.sql.exec(` + CREATE TABLE IF NOT EXISTS mutation_outbox ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + document_slug TEXT NOT NULL, + document_revision INTEGER, + event_id INTEGER, + event_type TEXT NOT NULL, + event_data TEXT NOT NULL DEFAULT '{}', + actor TEXT NOT NULL DEFAULT '', + idempotency_key TEXT, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + delivered_at TEXT + ) + `); + + this.sql.exec(` + CREATE TABLE IF NOT EXISTS idempotency_keys ( + idempotency_key TEXT NOT NULL, + document_slug TEXT NOT NULL, + route TEXT NOT NULL, + response_json TEXT NOT NULL, + request_hash TEXT, + status_code INTEGER NOT NULL DEFAULT 200, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + PRIMARY KEY (idempotency_key, document_slug, route) + ) + `); + + this.sql.exec(` + CREATE TABLE IF NOT EXISTS document_access ( + token_id TEXT PRIMARY KEY, + document_slug TEXT NOT NULL, + role TEXT NOT NULL, + secret_hash TEXT NOT NULL, + created_at TEXT NOT NULL DEFAULT (datetime('now')), + revoked_at TEXT + ) + `); + + this.tablesInitialized = true; + } + + // ------------------------------------------------------------------------- + // Helpers + // ------------------------------------------------------------------------- + + private assertSlug(slug: string): void { + if (slug !== this.slug) { + throw new Error( + `DODocumentStorage slug mismatch: expected "${this.slug}", got "${slug}"`, + ); + } + } + + private getDocumentRevision(): number | null { + this.ensureTables(); + const rows = this.sql + .exec('SELECT revision FROM documents WHERE slug = ? LIMIT 1', this.slug) + .toArray(); + if (rows.length === 0) return null; + const rev = rows[0]['revision']; + return typeof rev === 'number' ? 
rev : null; + } + + private now(): string { + return new Date().toISOString(); + } + + // ------------------------------------------------------------------------- + // DocumentStorage implementation + // ------------------------------------------------------------------------- + + getDocumentBySlug(slug: string): StorageDocumentRow | undefined { + this.assertSlug(slug); + this.ensureTables(); + const rows = this.sql + .exec('SELECT * FROM documents WHERE slug = ? LIMIT 1', this.slug) + .toArray(); + if (rows.length === 0) return undefined; + return rows[0] as unknown as StorageDocumentRow; + } + + updateDocument( + slug: string, + markdown: string, + marks?: Record, + ): boolean { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + if (marks !== undefined) { + this.sql.exec( + `UPDATE documents + SET markdown = ?, marks = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + JSON.stringify(marks), + ts, + this.slug, + ); + } else { + this.sql.exec( + `UPDATE documents + SET markdown = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + ts, + this.slug, + ); + } + // DO SQLite sql.exec doesn't return changes count directly; + // verify by re-reading the updated_at timestamp + const doc = this.getDocumentBySlug(slug); + return doc !== undefined && doc.updated_at === ts; + } + + updateDocumentAtomic( + slug: string, + expectedUpdatedAt: string, + markdown: string, + marks?: Record, + ): boolean { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + if (marks !== undefined) { + this.sql.exec( + `UPDATE documents + SET markdown = ?, marks = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND updated_at = ? 
AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + JSON.stringify(marks), + ts, + this.slug, + expectedUpdatedAt, + ); + } else { + this.sql.exec( + `UPDATE documents + SET markdown = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND updated_at = ? AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + ts, + this.slug, + expectedUpdatedAt, + ); + } + const doc = this.getDocumentBySlug(slug); + return doc !== undefined && doc.updated_at === ts; + } + + updateDocumentAtomicByRevision( + slug: string, + expectedRevision: number, + markdown: string, + marks?: Record, + ): boolean { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + if (marks !== undefined) { + this.sql.exec( + `UPDATE documents + SET markdown = ?, marks = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND revision = ? AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + JSON.stringify(marks), + ts, + this.slug, + expectedRevision, + ); + } else { + this.sql.exec( + `UPDATE documents + SET markdown = ?, updated_at = ?, revision = revision + 1 + WHERE slug = ? AND revision = ? AND share_state IN ('ACTIVE', 'PAUSED')`, + markdown, + ts, + this.slug, + expectedRevision, + ); + } + const doc = this.getDocumentBySlug(slug); + return doc !== undefined && doc.updated_at === ts; + } + + getMarks(slug: string): string | null { + this.assertSlug(slug); + const doc = this.getDocumentBySlug(slug); + if (!doc) return null; + return doc.marks; + } + + setMarks(slug: string, marks: Record): boolean { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + this.sql.exec( + `UPDATE documents + SET marks = ?, updated_at = ? + WHERE slug = ? 
AND share_state IN ('ACTIVE', 'PAUSED')`, + JSON.stringify(marks), + ts, + this.slug, + ); + const doc = this.getDocumentBySlug(slug); + return doc !== undefined && doc.updated_at === ts; + } + + addDocumentEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + idempotencyKey?: string, + ): number { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + const payload = JSON.stringify(eventData); + const documentRevision = this.getDocumentRevision(); + + this.sql.exec( + `INSERT INTO document_events + (document_slug, document_revision, event_type, event_data, actor, idempotency_key, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?)`, + this.slug, + documentRevision, + eventType, + payload, + actor, + idempotencyKey ?? null, + ts, + ); + + // Retrieve the auto-increment id of the inserted event + const eventRows = this.sql + .exec( + 'SELECT id FROM document_events WHERE document_slug = ? ORDER BY id DESC LIMIT 1', + this.slug, + ) + .toArray(); + const eventId = eventRows.length > 0 ? (eventRows[0]['id'] as number) : 0; + + this.sql.exec( + `INSERT INTO mutation_outbox + (document_slug, document_revision, event_id, event_type, event_data, actor, idempotency_key, created_at, delivered_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, NULL)`, + this.slug, + documentRevision, + eventId, + eventType, + payload, + actor, + idempotencyKey ?? null, + ts, + ); + + return eventId; + } + + addEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + ): void { + this.assertSlug(slug); + // In the DO context there is no separate global `events` table. + // Delegate to addDocumentEvent which writes to document_events + outbox. 
+ this.addDocumentEvent(slug, eventType, eventData, actor); + } + + listDocumentEvents( + slug: string, + afterId: number, + limit: number = DEFAULT_EVENT_PAGE_SIZE, + ): StorageDocumentEventRow[] { + this.assertSlug(slug); + this.ensureTables(); + const safeLimit = Math.max(1, Math.min(limit, 500)); + const rows = this.sql + .exec( + `SELECT * FROM document_events + WHERE document_slug = ? AND id > ? + ORDER BY id ASC + LIMIT ?`, + this.slug, + afterId, + safeLimit, + ) + .toArray(); + return rows as unknown as StorageDocumentEventRow[]; + } + + ackDocumentEvents(slug: string, upToId: number, ackedBy: string): number { + this.assertSlug(slug); + this.ensureTables(); + const ts = this.now(); + // Count matching rows before update since DO sql.exec doesn't return changes + const beforeRows = this.sql + .exec( + `SELECT COUNT(*) AS cnt FROM document_events + WHERE document_slug = ? AND id <= ? AND acked_at IS NULL`, + this.slug, + upToId, + ) + .toArray(); + const count = (beforeRows[0]?.['cnt'] as number) ?? 0; + + if (count > 0) { + this.sql.exec( + `UPDATE document_events + SET acked_by = ?, acked_at = ? + WHERE document_slug = ? AND id <= ? AND acked_at IS NULL`, + ackedBy, + ts, + this.slug, + upToId, + ); + } + + return count; + } + + getStoredIdempotencyRecord( + documentSlug: string, + route: string, + idempotencyKey: string, + ): StorageIdempotencyRecord | null { + this.assertSlug(documentSlug); + this.ensureTables(); + const rows = this.sql + .exec( + `SELECT response_json, request_hash + FROM idempotency_keys + WHERE idempotency_key = ? AND document_slug = ? AND route = ? 
+ LIMIT 1`, + idempotencyKey, + documentSlug, + route, + ) + .toArray(); + + if (rows.length === 0) return null; + const row = rows[0]; + const responseJson = row['response_json']; + if (typeof responseJson !== 'string') return null; + + try { + const response = JSON.parse(responseJson) as Record; + const requestHash = row['request_hash']; + return { + response, + requestHash: typeof requestHash === 'string' ? requestHash : null, + }; + } catch { + return null; + } + } + + storeIdempotencyResult( + documentSlug: string, + route: string, + idempotencyKey: string, + response: Record, + requestHash?: string | null, + options?: { statusCode?: number; tombstoneRevision?: number | null }, + ): void { + this.assertSlug(documentSlug); + this.ensureTables(); + const ts = this.now(); + const statusCode = Number.isInteger(options?.statusCode) + ? Number(options?.statusCode) + : 200; + const encoded = JSON.stringify(response); + + this.sql.exec( + `INSERT OR REPLACE INTO idempotency_keys + (idempotency_key, document_slug, route, response_json, request_hash, status_code, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?)`, + idempotencyKey, + documentSlug, + route, + encoded, + requestHash ?? null, + statusCode, + ts, + ); + } + + resolveDocumentAccessRole( + slug: string, + presentedSecret: string, + ): ShareRole | null { + this.assertSlug(slug); + this.ensureTables(); + + // Access resolution requires comparing hashed secrets. The DO sql API is + // synchronous so we cannot use the async Web Crypto digest here. Instead, + // the caller is expected to pre-hash the secret before calling this method, + // or the tokens should be stored with a hash that can be compared directly. + // + // For now we do a direct comparison of the presented value against the + // stored secret_hash column. A full production integration would hash the + // presented secret before comparison (e.g., via an async wrapper around + // this method). 
+ + const rows = this.sql + .exec( + `SELECT role, secret_hash FROM document_access + WHERE document_slug = ? AND revoked_at IS NULL`, + this.slug, + ) + .toArray(); + + for (const row of rows) { + const storedHash = row['secret_hash']; + const role = row['role']; + if (typeof storedHash === 'string' && typeof role === 'string') { + if (storedHash === presentedSecret) { + return role as ShareRole; + } + } + } + + return null; + } +} diff --git a/apps/proof-cloudflare/src/storage-interface.ts b/apps/proof-cloudflare/src/storage-interface.ts new file mode 100644 index 0000000..fc70bb2 --- /dev/null +++ b/apps/proof-cloudflare/src/storage-interface.ts @@ -0,0 +1,122 @@ +/** + * Abstract storage interface for per-document operations. + * + * Copied from server/storage-interface.ts with local import paths. + * This interface enables the same route logic to target both the Node.js + * SQLite backend and Cloudflare Durable Object SQLite. + */ + +import type { DocumentEventType } from './event-types.js'; +import type { ShareRole, ShareState } from './share-types.js'; + +// --------------------------------------------------------------------------- +// Row types +// --------------------------------------------------------------------------- + +/** Core document record as stored in the `documents` table. */ +export interface StorageDocumentRow { + slug: string; + doc_id: string | null; + title: string | null; + markdown: string; + marks: string; + revision: number; + y_state_version: number; + share_state: ShareState; + access_epoch: number; + active: number; + created_at: string; + updated_at: string; + deleted_at: string | null; +} + +/** A single row from the `document_events` table. 
*/ +export interface StorageDocumentEventRow { + id: number; + document_slug: string; + document_revision: number | null; + event_type: string; + event_data: string; + actor: string; + idempotency_key: string | null; + created_at: string; + acked_by: string | null; + acked_at: string | null; +} + +/** Stored idempotency lookup result. */ +export interface StorageIdempotencyRecord { + response: Record; + requestHash: string | null; +} + +// --------------------------------------------------------------------------- +// Interface +// --------------------------------------------------------------------------- + +export interface DocumentStorage { + getDocumentBySlug(slug: string): StorageDocumentRow | undefined; + + updateDocument( + slug: string, + markdown: string, + marks?: Record, + ): boolean; + + updateDocumentAtomic( + slug: string, + expectedUpdatedAt: string, + markdown: string, + marks?: Record, + ): boolean; + + updateDocumentAtomicByRevision( + slug: string, + expectedRevision: number, + markdown: string, + marks?: Record, + ): boolean; + + getMarks(slug: string): string | null; + setMarks(slug: string, marks: Record): boolean; + + addDocumentEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + idempotencyKey?: string, + ): number; + + addEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + ): void; + + listDocumentEvents( + slug: string, + afterId: number, + limit?: number, + ): StorageDocumentEventRow[]; + + ackDocumentEvents(slug: string, upToId: number, ackedBy: string): number; + + getStoredIdempotencyRecord( + documentSlug: string, + route: string, + idempotencyKey: string, + ): StorageIdempotencyRecord | null; + + storeIdempotencyResult( + documentSlug: string, + route: string, + idempotencyKey: string, + response: Record, + requestHash?: string | null, + options?: { statusCode?: number; tombstoneRevision?: number | null }, + ): void; + + 
resolveDocumentAccessRole(slug: string, presentedSecret: string): ShareRole | null; +} diff --git a/apps/proof-cloudflare/tsconfig.json b/apps/proof-cloudflare/tsconfig.json new file mode 100644 index 0000000..3125633 --- /dev/null +++ b/apps/proof-cloudflare/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "resolveJsonModule": true, + "declaration": false, + "lib": ["ES2022"], + "types": ["@cloudflare/workers-types"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/apps/proof-cloudflare/wrangler.jsonc b/apps/proof-cloudflare/wrangler.jsonc new file mode 100644 index 0000000..93c6797 --- /dev/null +++ b/apps/proof-cloudflare/wrangler.jsonc @@ -0,0 +1,42 @@ +{ + "name": "proof-cloudflare", + "main": "src/index.ts", + "compatibility_date": "2024-12-01", + "compatibility_flags": ["nodejs_compat"], + + // Durable Objects + "durable_objects": { + "bindings": [ + { + "name": "DOCUMENT_SESSION", + "class_name": "DocumentSession" + } + ] + }, + + "migrations": [ + { + "tag": "v1", + "new_sqlite_classes": ["DocumentSession"] + } + ], + + // D1 database for document catalog + "d1_databases": [ + { + "binding": "CATALOG_DB", + "database_name": "proof-catalog", + "database_id": "846224ed-030d-4013-b09c-d23ba85555f0", + "migrations_dir": "d1/migrations" + } + ], + + // Serve the built editor from dist/ + "assets": { + "directory": "../../dist/", + "binding": "ASSETS", + "not_found_handling": "none", + // Run worker first for: root (doc creation), doc pages, API routes, WebSocket + "run_worker_first": ["/", "/d/*", "/api/*", "/documents/*", "/ws", "/ws/*", "/health"] + } +} diff --git a/docs/DEPLOYMENT.md b/docs/DEPLOYMENT.md new file mode 100644 index 0000000..49d122e --- /dev/null +++ b/docs/DEPLOYMENT.md @@ -0,0 +1,235 @@ +# Deployment + +Proof SDK has two deployment targets: the 
Express server (any Node.js host) and Cloudflare Workers with Durable Objects. Both serve the same editor frontend and agent HTTP bridge. + +| | Express | Cloudflare Workers | +|---|---------|-------------------| +| Architecture | Node.js process, SQLite on disk, Hocuspocus collab | Worker + Durable Objects, D1 catalog, DO-embedded SQLite | +| Scaling | Vertical (single process) | Horizontal (per-document DO isolation) | +| State | Single SQLite file | D1 for catalog, SQLite-in-DO per document | +| Cold start | None (long-running process) | ~10-50ms (Worker + DO) | +| WebSocket collab | Hocuspocus on dedicated port | WebSocket via Durable Object | +| Cost | Host-dependent | Workers Paid ($5/mo base) | +| Best for | Familiarity, self-hosting, full feature set | Global edge, per-document isolation, zero-ops scaling | + +## Prerequisites + +- Node.js 18+ +- `npm install && npm run build` (Vite IIFE bundle to `dist/`) + +## Option 1: Express Server + +### Local production test + +```bash +npm run serve +``` + +Express listens on `:4000`, Hocuspocus collab on `:4001`. SQLite (`proof-share.db`) is auto-created on first run — no migrations needed. + +### Environment variables + +Core: + +| Variable | Default | Description | +|----------|---------|-------------| +| `PORT` | `4000` | HTTP listen port | +| `NODE_ENV` | `development` | Node environment | +| `PROOF_ENV` | from `NODE_ENV` | Proof-specific environment override | +| `DATABASE_PATH` | `./proof-share.db` | SQLite database path | + +Auth: + +| Variable | Default | Description | +|----------|---------|-------------| +| `PROOF_SHARE_MARKDOWN_AUTH_MODE` | `none` | Auth mode: `none` or `api_key` | +| `PROOF_SHARE_MARKDOWN_API_KEY` | — | Required when auth mode is `api_key` | + +Collab: + +| Variable | Default | Description | +|----------|---------|-------------| +| `PROOF_COLLAB_SIGNING_SECRET` | auto-generated | Signing secret for collab tokens. 
Auto-generated in dev; set explicitly in production for stable sessions across restarts | +| `COLLAB_PORT` | `PORT + 1` | Hocuspocus WebSocket port | +| `COLLAB_PUBLIC_BASE_URL` | `ws://localhost:` | Public WebSocket URL for clients | + +Network: + +| Variable | Default | Description | +|----------|---------|-------------| +| `PROOF_CORS_ALLOW_ORIGINS` | localhost:3000,4000 | Comma-separated allowed CORS origins | +| `PROOF_PUBLIC_ORIGIN` | auto-detected | Public URL for link generation | +| `PROOF_PUBLIC_BASE_URL` | auto-detected | Base URL for API responses | + +Many additional tuning variables exist for collab timeouts, agent edit stability, snapshot storage, and more. See `server/collab.ts` and `server/agent-routes.ts` for the full set. + +### Deploy to Railway + +1. Connect your GitHub repo in the Railway dashboard +2. Set environment variables: + ``` + NODE_ENV=production + PROOF_COLLAB_SIGNING_SECRET= + PROOF_CORS_ALLOW_ORIGINS=https://your-domain.up.railway.app + PROOF_PUBLIC_ORIGIN=https://your-domain.up.railway.app + ``` + (`PORT` is auto-assigned by Railway) +3. Add a volume mounted at `/data` for SQLite persistence +4. Set `DATABASE_PATH=/data/proof-share.db` +5. Generate a domain in Railway settings +6. Verify: `curl https://your-domain.up.railway.app/health` + +### Other Node.js hosts + +Works on any host that runs Node.js — Render, Fly.io, DigitalOcean App Platform, etc. Key requirements: + +- **Persistent volume** for SQLite (ephemeral filesystems lose data on redeploy) +- **WebSocket support** for realtime collab (both HTTP and WS ports) +- **`PORT` env var** (most PaaS platforms set this automatically) + +## Option 2: Cloudflare Workers + Durable Objects + +### Prerequisites + +- Cloudflare account with **Workers Paid plan** ($5/mo) — required for Durable Objects +- Wrangler CLI: + ```bash + npm install -g wrangler + wrangler login + ``` + +### Setup + +1. Create the D1 database: + ```bash + wrangler d1 create proof-catalog + ``` + +2. 
Update `database_id` in `apps/proof-cloudflare/wrangler.jsonc` with the ID from step 1. + +3. Run the D1 migration: + ```bash + cd apps/proof-cloudflare + wrangler d1 migrations apply proof-catalog --remote + ``` + +4. Build the frontend (from repo root): + ```bash + npm run build + ``` + +5. Deploy: + ```bash + cd apps/proof-cloudflare + npx wrangler deploy + ``` + +### Local development + +```bash +npm run build # build frontend assets +cd apps/proof-cloudflare && npm run dev # Miniflare with D1 + DO simulation +``` + +### Configuration + +All bindings are declared in `wrangler.jsonc`: + +- **D1** (`CATALOG_DB`) — document catalog (slug, title, DO mapping) +- **Durable Objects** (`DOCUMENT_SESSION`) — per-document state, collab, marks +- **Assets** — serves the built editor from `../../dist/` +- **`run_worker_first`** — routes `/`, `/d/*`, `/api/*`, `/documents/*`, `/ws`, `/health` to the Worker before falling through to static assets + +DO migrations (`new_sqlite_classes`) are declared in config and auto-applied on deploy. + +### CI/CD + +A GitHub Actions workflow is included at `.github/workflows/deploy-cloudflare.yml`. It's **manual-trigger only** (`workflow_dispatch`) — it won't run on push or merge. This is intentional: the repo is an SDK, and most contributors won't have or need a Cloudflare deployment. The workflow is there so that anyone who *does* deploy can use it without writing their own. + +**Setup:** + +1. Create a Cloudflare API token with `Workers Scripts:Edit` and `D1:Edit` permissions +2. Add it as `CLOUDFLARE_API_TOKEN` in your repo (Settings > Secrets and variables > Actions) +3. Trigger from the Actions tab or CLI: + ```bash + gh workflow run deploy-cloudflare.yml + ``` + +The workflow runs `npm install`, `npm run build` (frontend bundle), then `wrangler deploy` from `apps/proof-cloudflare/`. D1 and Durable Object migrations are applied automatically by Wrangler on each deploy. 
+ +**Upgrading to auto-deploy:** + +If you're running your own fork and want every merge to `main` to deploy automatically, add a `push` trigger alongside the existing `workflow_dispatch`: + +```yaml +on: + push: + branches: [main] + paths: + - "apps/proof-cloudflare/**" + - "src/**" + - "packages/**" + - "package.json" + workflow_dispatch: + inputs: + environment: + description: "Deployment environment" + required: false + default: "production" + type: choice + options: + - production + - preview +``` + +The `paths` filter avoids deploying on doc-only changes. You can still trigger manually for off-cycle deploys or to select the preview environment. + +### Custom domain + +Add `routes` or `custom_domain` to `wrangler.jsonc`, or configure via the Cloudflare dashboard under Workers > your worker > Settings > Domains & Routes. + +### Secrets + +If using API key auth: + +```bash +wrangler secret put PROOF_API_KEY +``` + +## Architecture Comparison + +| | Express | Workers + DO | +|---|---------|-------------| +| Scaling model | Single process, vertical | Per-document Durable Object, horizontal | +| Document isolation | Shared process, shared SQLite | Each document in its own DO with embedded SQLite | +| Cold start | None | ~10-50ms | +| Failure blast radius | Whole server | Single document | +| Horizontal scaling | Requires external coordination | Automatic per-document | +| WebSocket | Hocuspocus (separate port) | Native DO WebSocket | +| Persistence | SQLite file on disk | D1 (catalog) + DO storage (documents) | + +**Choose Express** when you want full feature parity with the reference server, need a single deployment artifact, or are already running Node.js infrastructure. + +**Choose Workers** when you need global edge deployment, per-document isolation, or want to avoid managing servers. 
+ +## Health Checks + +- **Express**: `GET /health` — returns JSON with `ok`, build info, and collab runtime state +- **Workers**: `GET /health` — returns `200 OK` + +## Verify Your Deployment + +1. Visit the root URL — you should see the Proof SDK landing page +2. Create a document: `curl -X POST https://your-host/documents` +3. Open the returned URL in two browser tabs to verify realtime collab +4. Test agent endpoints: + ```bash + # Get document state + curl https://your-host/documents//state + + # Add a comment + curl -X POST https://your-host/documents//bridge/comments \ + -H "Content-Type: application/json" \ + -d '{"text": "Test comment", "target": "first paragraph"}' + ``` +5. Check health: `curl https://your-host/health` diff --git a/docs/adr/2026-03-cloudflare-workers-deployment.md b/docs/adr/2026-03-cloudflare-workers-deployment.md new file mode 100644 index 0000000..c0d0ba4 --- /dev/null +++ b/docs/adr/2026-03-cloudflare-workers-deployment.md @@ -0,0 +1,31 @@ +# ADR: Cloudflare Workers Deployment Target + +## Status + +Proposed + +## Decision + +We are adding a Cloudflare Workers deployment target under `apps/proof-cloudflare/`. It runs the same editor frontend and agent bridge as the Express server, but on Cloudflare's edge runtime using Durable Objects for per-document state. + +The architecture maps Proof SDK concepts to Cloudflare primitives: + +| Proof SDK concept | Express server | Cloudflare Workers | +|---|---|---| +| Document catalog | SQLite table | D1 database | +| Document state + collab | Shared SQLite + Hocuspocus | Durable Object with embedded SQLite | +| WebSocket collab | Hocuspocus on separate port | Native DO WebSocket | +| Static assets | `express.static` / Vite dev server | Workers Assets | +| Agent bridge routes | Express middleware | Worker fetch handler → DO delegation | + +Each document gets its own Durable Object instance. 
The Worker's fetch handler routes requests to the correct DO by slug, using D1 as the slug-to-DO-ID catalog. This gives per-document isolation: a crash or timeout in one document does not affect others. + +The Worker reuses the shared `dist/` bundle built by `npm run build` — no separate frontend build is needed. + +## Consequences + +- The Express server remains the reference deployment and the default for local development. The Worker is an alternative, not a replacement. +- Agent bridge routes must be implemented independently in the Worker since they cannot import Express middleware directly. The route surface is kept in sync manually. +- Durable Object SQLite is local to each DO instance, not queryable across documents. Cross-document queries (e.g., listing all documents) go through D1. +- Workers Paid plan ($5/mo) is required for Durable Objects. This is a hosting cost, not a SDK licensing concern. +- The `apps/` directory establishes a pattern for additional deployment targets (e.g., Docker, serverless) without modifying the core SDK. diff --git a/docs/agent-docs.md b/docs/agent-docs.md index 6a956d8..9c4da77 100644 --- a/docs/agent-docs.md +++ b/docs/agent-docs.md @@ -1,5 +1,7 @@ # Proof Agent Docs +> **Deployment note**: Examples use `localhost:4000` (local Express server). The same routes and API apply to all Proof SDK deployments — Express, Cloudflare Workers, etc. Replace `localhost:4000` with your deployment URL. See `docs/DEPLOYMENT.md` for setup options. + ## Proof SDK Route Alias Hosted Proof keeps the `/api/agent/*` and `/share/markdown` compatibility routes. diff --git a/docs/proof.SKILL.md b/docs/proof.SKILL.md index 0cff2b9..3ab2275 100644 --- a/docs/proof.SKILL.md +++ b/docs/proof.SKILL.md @@ -12,6 +12,7 @@ Proof is the hosted product. Proof SDK is the open-source editor, collaboration - Include `by` on every write. Use `ai:`. - Treat `slug + token` as the document address and auth pair. - Prefer HTTP APIs over local runtime assumptions. 
+- All routes work on any Proof SDK deployment (Express, Cloudflare Workers, etc.). Examples below use `localhost:4000`; replace with your deployment URL. ## Authentication @@ -37,7 +38,7 @@ curl -sS -X POST http://localhost:4000/documents \ -d '{"title":"My Document","markdown":"# Hello\n\nFirst draft."}' ``` -Hosted Proof also keeps `POST /share/markdown` as a compatibility alias. +`POST /share/markdown` is a compatibility alias. ### Read state @@ -155,8 +156,9 @@ curl -sS -X POST "http://localhost:4000/documents//presence" \ ## References -- Discovery JSON: `http://localhost:4000/.well-known/agent.json` -- Docs: `http://localhost:4000/agent-docs` -- Setup: `http://localhost:4000/agent-setup` -- [AGENT_CONTRACT.md](/Users/danshipper/CascadeProjects/every-proof/.worktrees/proof-sdk-split/AGENT_CONTRACT.md) -- [agent-docs.md](/Users/danshipper/CascadeProjects/every-proof/.worktrees/proof-sdk-split/docs/agent-docs.md) +- Discovery JSON: `/.well-known/agent.json` +- Docs: `/agent-docs` +- Setup: `/agent-setup` +- `AGENT_CONTRACT.md` +- `docs/agent-docs.md` +- `docs/DEPLOYMENT.md` diff --git a/server/event-types.ts b/server/event-types.ts new file mode 100644 index 0000000..8d5bf83 --- /dev/null +++ b/server/event-types.ts @@ -0,0 +1,49 @@ +/** + * Typed event registry for document and agent events. + * + * Replaces freeform event type strings with a single const object and + * derived union type so that typos are caught at compile time. 
+ */ + +export const DocumentEventType = { + // Document lifecycle + 'document.created': 'document.created', + 'document.updated': 'document.updated', + 'document.rewritten': 'document.rewritten', + 'document.deleted': 'document.deleted', + 'document.paused': 'document.paused', + 'document.resumed': 'document.resumed', + 'document.revoked': 'document.revoked', + 'document.title.updated': 'document.title.updated', + 'document.edited': 'document.edited', + + // Agent activity + 'agent.connected': 'agent.connected', + 'agent.presence': 'agent.presence', + 'agent.disconnected': 'agent.disconnected', + 'agent.edit': 'agent.edit', + 'agent.edit.v2': 'agent.edit.v2', + + // Comments + 'comment.added': 'comment.added', + 'comment.replied': 'comment.replied', + 'comment.resolved': 'comment.resolved', + 'comment.unresolved': 'comment.unresolved', + + // Suggestions — add/accept/reject + 'suggestion.insert.added': 'suggestion.insert.added', + 'suggestion.delete.added': 'suggestion.delete.added', + 'suggestion.replace.added': 'suggestion.replace.added', + 'suggestion.accepted': 'suggestion.accepted', + 'suggestion.rejected': 'suggestion.rejected', + + // Mention + 'mention': 'mention', +} as const; + +export type DocumentEventType = (typeof DocumentEventType)[keyof typeof DocumentEventType]; + +/** Runtime type guard for DocumentEventType values. */ +export function isDocumentEventType(value: unknown): value is DocumentEventType { + return typeof value === 'string' && value in DocumentEventType; +} diff --git a/server/storage-interface.ts b/server/storage-interface.ts new file mode 100644 index 0000000..cddf98a --- /dev/null +++ b/server/storage-interface.ts @@ -0,0 +1,176 @@ +/** + * Abstract storage interface for per-document operations. + * + * This interface captures the subset of `db.ts` functions that agent routes + * depend on, enabling the same route logic to target both the Node.js SQLite + * backend (better-sqlite3) and Cloudflare Durable Object SQLite. 
+ * + * The existing `db.ts` module is NOT modified; this is a forward-looking + * extraction for use in alternative deployment targets. + */ + +import type { DocumentEventType } from './event-types.js'; +import type { ShareRole, ShareState } from './share-types.js'; + +// --------------------------------------------------------------------------- +// Row types (mirrored from db.ts to avoid coupling to better-sqlite3) +// --------------------------------------------------------------------------- + +/** Core document record as stored in the `documents` table. */ +export interface StorageDocumentRow { + slug: string; + doc_id: string | null; + title: string | null; + markdown: string; + marks: string; + revision: number; + y_state_version: number; + share_state: ShareState; + access_epoch: number; + active: number; + created_at: string; + updated_at: string; + deleted_at: string | null; +} + +/** A single row from the `document_events` table. */ +export interface StorageDocumentEventRow { + id: number; + document_slug: string; + document_revision: number | null; + event_type: string; + event_data: string; + actor: string; + idempotency_key: string | null; + created_at: string; + acked_by: string | null; + acked_at: string | null; +} + +/** Stored idempotency lookup result. */ +export interface StorageIdempotencyRecord { + response: Record; + requestHash: string | null; +} + +// --------------------------------------------------------------------------- +// Interface +// --------------------------------------------------------------------------- + +/** + * Per-document storage operations consumed by agent routes. + * + * Implementations must be scoped to a single document slug (Cloudflare DO) or + * accept a slug parameter (Node.js SQLite). The interface uses explicit slug + * parameters so that a single-slug DO implementation can simply assert or + * ignore the slug while a multi-document SQLite implementation can dispatch + * normally. 
+ */ +export interface DocumentStorage { + // -- Document CRUD ------------------------------------------------------- + + /** Fetch a document record by slug. */ + getDocumentBySlug(slug: string): StorageDocumentRow | undefined; + + /** + * Non-atomic document update. Returns `true` if the row was updated. + * Marks are optional — when omitted, only markdown is updated. + */ + updateDocument( + slug: string, + markdown: string, + marks?: Record<string, unknown>, + ): boolean; + + /** + * Atomic update gated on `updated_at` timestamp. + * Returns `true` if the row matched the precondition and was updated. + */ + updateDocumentAtomic( + slug: string, + expectedUpdatedAt: string, + markdown: string, + marks?: Record<string, unknown>, + ): boolean; + + /** + * Atomic update gated on revision number. + * Returns `true` if the row matched the precondition and was updated. + */ + updateDocumentAtomicByRevision( + slug: string, + expectedRevision: number, + markdown: string, + marks?: Record<string, unknown>, + ): boolean; + + // -- Marks --------------------------------------------------------------- + + /** Read the raw marks JSON string for a document. */ + getMarks(slug: string): string | null; + + /** Overwrite marks for a document. Returns `true` on success. */ + setMarks(slug: string, marks: Record<string, unknown>): boolean; + + // -- Events -------------------------------------------------------------- + + /** + * Write a document-scoped event (written to both `document_events` and + * the `mutation_outbox`). Returns the new event row id. + */ + addDocumentEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + idempotencyKey?: string, + ): number; + + /** + * Write a global event (written to the general `events` table as well as + * `document_events` + `mutation_outbox`). + */ + addEvent( + slug: string, + eventType: DocumentEventType, + eventData: unknown, + actor: string, + ): void; + + /** + * List document events with id > `afterId`, up to `limit` rows. 
+ * Used by the agent polling endpoint. + */ + listDocumentEvents( + slug: string, + afterId: number, + limit?: number, + ): StorageDocumentEventRow[]; + + /** Mark events up to `upToId` as acknowledged. Returns rows affected. */ + ackDocumentEvents(slug: string, upToId: number, ackedBy: string): number; + + // -- Idempotency --------------------------------------------------------- + + /** Look up a previously stored idempotency result. */ + getStoredIdempotencyRecord( + documentSlug: string, + route: string, + idempotencyKey: string, + ): StorageIdempotencyRecord | null; + + /** Persist an idempotency result for future replay. */ + storeIdempotencyResult( + documentSlug: string, + route: string, + idempotencyKey: string, + response: Record<string, unknown>, + requestHash?: string | null, + options?: { statusCode?: number; tombstoneRevision?: number | null }, + ): void; + + // -- Access control (simplified) ----------------------------------------- + + /** Resolve the access role for a presented secret. */ + resolveDocumentAccessRole(slug: string, presentedSecret: string): ShareRole | null; +} diff --git a/src/editor/index.ts b/src/editor/index.ts index 2e1fe65..3049839 100644 --- a/src/editor/index.ts +++ b/src/editor/index.ts @@ -1633,6 +1633,26 @@ class ProofEditorImpl implements ProofEditor { return 'Anonymous'; } + private async renameShareViewer(): Promise<void> { + const initialValue = this.shareViewerName ?? getViewerName() ?? this.deriveDefaultShareViewerName(); + try { + const name = await promptForName({ force: true, initialValue }); + const resolvedName = typeof name === 'string' && name.trim().length > 0 + ? name.trim() + : this.deriveDefaultShareViewerName(); + this.shareViewerName = resolvedName; + setCurrentActorValue(`human:${resolvedName}`); + shareClient.setViewerName(resolvedName); + collabClient.setLocalUser( + { name: resolvedName }, + shareClient.getSlug() ?? 
undefined, + ); + this.updateShareBannerPresenceDisplay(); + } catch (error) { + console.warn('[share] rename display name failed', error); + } + } + activateShareRuntime(options?: ShareRuntimeActivationOptions): boolean { if (this.shareRuntimeActivationInFlight) return false; const hasShareConfig = shareClient.refreshRuntimeConfig(); @@ -4097,6 +4117,9 @@ class ProofEditorImpl implements ProofEditor { addItem('Copy link', async () => this.copyLinkWithFallback(this.getCanonicalShareUrl())); addDivider(); + addActionItem('Change display name', () => { + void this.renameShareViewer(); + }); addActionItem('View activity', () => this.openShareActivityModal()); container.appendChild(menu); diff --git a/src/ui/name-prompt.ts b/src/ui/name-prompt.ts index 69cfb2d..ea905de 100644 --- a/src/ui/name-prompt.ts +++ b/src/ui/name-prompt.ts @@ -26,13 +26,17 @@ export function setViewerName(name: string): void { localStorage.setItem(STORAGE_KEY, name); } +export function clearViewerName(): void { + localStorage.removeItem(STORAGE_KEY); +} + /** * Show name prompt modal if no name is stored. * Returns the viewer's name (from storage or newly entered). */ -export function promptForName(): Promise<string> { +export function promptForName(options?: { force?: boolean; initialValue?: string }): Promise<string> { const existing = getViewerName(); - if (existing) return Promise.resolve(existing); + if (existing && !options?.force) return Promise.resolve(existing); return new Promise((resolve) => { if (!document.body) { @@ -78,6 +82,7 @@ export function promptForName(): Promise<string> { input.autocomplete = 'name'; input.autocapitalize = 'words'; input.enterKeyHint = 'done'; + input.value = options?.initialValue ?? existing ?? ''; input.style.cssText = ` width: 100%; padding: 10px 14px; border: 1px solid #e0e0e0; border-radius: 10px; font-size: 16px; outline: none;