Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions src/audit.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -151,8 +151,8 @@ function seed(opts: {
};

db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, 1, ?, ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', 1, ?, ?, ?)`,
).run(PIPELINE_ID, JSON.stringify(def), NOW, NOW);
db.prepare(
`INSERT INTO runs (id, pipeline_id, pipeline_version, status,
Expand Down Expand Up @@ -426,8 +426,8 @@ describe("audit-log stress: 1000 writes don't bloat the DB", () => {
runMigrations(db);
try {
db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, 1, '{}', ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', 1, '{}', ?, ?)`,
).run(PIPELINE_ID, NOW, NOW);
db.prepare(
`INSERT INTO runs (id, pipeline_id, pipeline_version, status,
Expand Down
6 changes: 5 additions & 1 deletion src/db/migrations.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,11 @@ describe("runMigrations", () => {
expect(byName.get("def_json")?.notnull).toBe(1);
expect(byName.get("created_at")?.notnull).toBe(1);
expect(byName.get("updated_at")?.notnull).toBe(1);
expect(byName.get("publisher_id")?.notnull).toBe(1);
// publisher_id is schema-level NULL-able: with `foreign_keys=ON`,
// SQLite rejects `ALTER ADD COLUMN ... REFERENCES` unless the new
// column's DEFAULT is NULL (the rule applies regardless of existing
// rows). NULL is ruled out by the application invariant; see schema.md.
expect(byName.get("publisher_id")?.notnull).toBe(0);
expect(byName.get("publisher_id")?.type).toBe("TEXT");
});

Expand Down
38 changes: 28 additions & 10 deletions src/db/migrations/0002_publishers_and_tokens.sql
Original file line number Diff line number Diff line change
Expand Up @@ -141,19 +141,37 @@ VALUES (
);

-- ---------------------------------------------------------------------------
-- Add publisher_id to pipelines. The constant DEFAULT back-fills existing
-- rows to the demo seed; the FK is enforced going forward by the
-- foreign_keys=ON pragma applied on every connection in openDb.
-- Add publisher_id to pipelines.
--
-- SQLite limitation: ALTER TABLE ADD COLUMN with REFERENCES does not
-- retroactively validate the FK against existing rows — but the constant
-- DEFAULT we use ('pub_demo_seed') matches the row inserted above, so a
-- post-migration foreign_key_check is expected to return zero violations.
-- Tests assert this.
-- **SQLite caveat (cost of the M1 deploy outage on first attempt):**
-- with `foreign_keys=ON`, SQLite refuses ALTER ADD COLUMN with a
-- `REFERENCES` clause and a non-NULL DEFAULT — the rule is
-- "REFERENCES added via ALTER must default to NULL", and per the
-- SQLite docs it applies regardless of whether rows exist. (The
-- local `:memory:` runs that appeared to pass presumably had
-- `foreign_keys` still OFF on that connection — verify.) In
-- production the ALTER fails with `Cannot add a REFERENCES column
-- with non-NULL default value`. See SQLite docs §"ALTER TABLE ADD
-- COLUMN".
--
-- Workaround: add the column NULLable with a NULL default, then
-- back-fill in a separate UPDATE. The FK is enforced going forward
-- by the `foreign_keys=ON` pragma. The application layer (the INSERT
-- in `mountPipelineRoutes`) always supplies `publisher_id` from
-- `c.var.publisher_id`, so a NULL never appears in any post-migration
-- write — but the schema-level NOT NULL guarantee is traded for one
-- that lives at the application layer.
--
-- A future migration can rebuild the table to recover the NOT NULL
-- guarantee (table-rebuild requires `PRAGMA foreign_keys=OFF` outside
-- the transaction; the current migration runner wraps each file in a
-- single BEGIN IMMEDIATE / COMMIT, so the rebuild is its own concern
-- when a migration runner that toggles FKs lands).
-- ---------------------------------------------------------------------------
ALTER TABLE pipelines
ADD COLUMN publisher_id TEXT NOT NULL DEFAULT 'pub_demo_seed'
REFERENCES publishers(id);
ADD COLUMN publisher_id TEXT REFERENCES publishers(id);

UPDATE pipelines
SET publisher_id = 'pub_demo_seed'
WHERE publisher_id IS NULL;

CREATE INDEX idx_pipelines_publisher
ON pipelines(publisher_id);
2 changes: 1 addition & 1 deletion src/db/schema.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ and for live subcommand-endpoint resolution (§3.3).
| `def_json` | `TEXT NOT NULL` | NO | Validated pipeline-def document. |
| `created_at` | `TEXT NOT NULL` | NO | First insertion. |
| `updated_at` | `TEXT NOT NULL` | NO | Most recent upsert. |
| `publisher_id` | `TEXT NOT NULL` | NO | FK → `publishers.id`. Added by 0002. Defaults to `pub_demo_seed` for back-filled rows. |
| `publisher_id` | `TEXT` | YES (schema-level) | FK → `publishers.id`. Added by 0002. Back-filled to `pub_demo_seed` for existing rows. **NULL is forbidden by application invariant** — `mountPipelineRoutes` always supplies `publisher_id` from `c.var.publisher_id`. SQLite's `ALTER ADD COLUMN ... REFERENCES` with a non-NULL DEFAULT is rejected whenever `foreign_keys=ON` (regardless of existing rows); a future table-rebuild migration can lift this to schema-level NOT NULL once the migration runner supports `PRAGMA foreign_keys` toggle. |

Foreign key: `publisher_id` REFERENCES `publishers(id)`.

Expand Down
4 changes: 2 additions & 2 deletions src/integration/full-flow.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -206,8 +206,8 @@ async function startHarness(
// gate at registration is sidestepped.
const now = new Date().toISOString();
db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', ?, ?, ?, ?)`,
).run(pipelineId, 1, JSON.stringify(pipelineDef), now, now);

return { db, app, mock, pipelineId, pipelineDef, webhookFirstAttempts };
Expand Down
8 changes: 4 additions & 4 deletions src/webhook.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,8 @@ function seedRun(opts: SeedOptions = {}): Database.Database {
runMigrations(db);

db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', ?, ?, ?, ?)`,
).run(PIPELINE_ID, 1, JSON.stringify(PIPELINE_DEF), SEED_NOW, SEED_NOW);

db.prepare(
Expand Down Expand Up @@ -521,8 +521,8 @@ describe("Webhook does not fire while spawned children are still pending", () =>
},
};
db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', ?, ?, ?, ?)`,
).run(PIPELINE_ID, 1, JSON.stringify(def), SEED_NOW, SEED_NOW);
db.prepare(
`INSERT INTO runs
Expand Down
11 changes: 8 additions & 3 deletions src/webhook_hmac.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,10 +78,15 @@ beforeEach(() => {
runMigrations(db);
seedDemoPublisher(db, { MURMUR_TOKEN: TEST_BEARER });

// Seed pipeline + completed run with one done subtask + result.
// Seed pipeline + completed run with one done subtask + result. The
// pipeline row must carry `publisher_id = 'pub_demo_seed'` (the seed
// demo publisher) — migration 0002's `ALTER ADD COLUMN` adds the
// column NULL-able because, under `foreign_keys=ON`, SQLite rejects
// a REFERENCES column added via ALTER with a non-NULL DEFAULT;
// INSERTs that omit `publisher_id` would store NULL and break the
// webhook_signing JOIN below.
db.prepare(
`INSERT INTO pipelines (id, version, def_json, created_at, updated_at)
VALUES (?, 1, ?, ?, ?)`,
`INSERT INTO pipelines (id, publisher_id, version, def_json, created_at, updated_at)
VALUES (?, 'pub_demo_seed', 1, ?, ?, ?)`,
).run(PIPELINE_ID, JSON.stringify(PIPELINE_DEF), SEED_NOW, SEED_NOW);
db.prepare(
`INSERT INTO runs (id, pipeline_id, pipeline_version, status,
Expand Down
Loading