diff --git a/convex/httpApiV1.handlers.test.ts b/convex/httpApiV1.handlers.test.ts index d5e8c8108..b34b1c6e9 100644 --- a/convex/httpApiV1.handlers.test.ts +++ b/convex/httpApiV1.handlers.test.ts @@ -1,54 +1,54 @@ /* @vitest-environment node */ -import { beforeEach, describe, expect, it, vi } from 'vitest' +import { beforeEach, describe, expect, it, vi } from "vitest"; -vi.mock('./lib/apiTokenAuth', () => ({ +vi.mock("./lib/apiTokenAuth", () => ({ requireApiTokenUser: vi.fn(), getOptionalApiTokenUserId: vi.fn(), -})) +})); -vi.mock('./skills', () => ({ +vi.mock("./skills", () => ({ publishVersionForUser: vi.fn(), -})) +})); -const { getOptionalApiTokenUserId, requireApiTokenUser } = await import('./lib/apiTokenAuth') -const { publishVersionForUser } = await import('./skills') -const { __handlers } = await import('./httpApiV1') +const { getOptionalApiTokenUserId, requireApiTokenUser } = await import("./lib/apiTokenAuth"); +const { publishVersionForUser } = await import("./skills"); +const { __handlers } = await import("./httpApiV1"); -type ActionCtx = import('./_generated/server').ActionCtx +type ActionCtx = import("./_generated/server").ActionCtx; -type RateLimitArgs = { key: string; limit: number; windowMs: number } +type RateLimitArgs = { key: string; limit: number; windowMs: number }; function isRateLimitArgs(args: unknown): args is RateLimitArgs { - if (!args || typeof args !== 'object') return false - const value = args as Record + if (!args || typeof args !== "object") return false; + const value = args as Record; return ( - typeof value.key === 'string' && - typeof value.limit === 'number' && - typeof value.windowMs === 'number' - ) + typeof value.key === "string" && + typeof value.limit === "number" && + typeof value.windowMs === "number" + ); } function hasSlugArgs(args: unknown): args is { slug: string } { - if (!args || typeof args !== 'object') return false - const value = args as Record - return typeof value.slug === 'string' + if (!args || typeof 
args !== "object") return false; + const value = args as Record; + return typeof value.slug === "string"; } function makeCtx(partial: Record) { const partialRunQuery = - typeof partial.runQuery === 'function' + typeof partial.runQuery === "function" ? (partial.runQuery as (query: unknown, args: Record) => unknown) - : null + : null; const runQuery = vi.fn(async (query: unknown, args: Record) => { - if (isRateLimitArgs(args)) return okRate() - return partialRunQuery ? await partialRunQuery(query, args) : null - }) + if (isRateLimitArgs(args)) return okRate(); + return partialRunQuery ? await partialRunQuery(query, args) : null; + }); const runMutation = - typeof partial.runMutation === 'function' + typeof partial.runMutation === "function" ? partial.runMutation - : vi.fn().mockResolvedValue(okRate()) + : vi.fn().mockResolvedValue(okRate()); - return { ...partial, runQuery, runMutation } as unknown as ActionCtx + return { ...partial, runQuery, runMutation } as unknown as ActionCtx; } const okRate = () => ({ @@ -56,672 +56,742 @@ const okRate = () => ({ remaining: 10, limit: 100, resetAt: Date.now() + 60_000, -}) +}); const blockedRate = () => ({ allowed: false, remaining: 0, limit: 100, resetAt: Date.now() + 60_000, -}) +}); beforeEach(() => { - vi.mocked(getOptionalApiTokenUserId).mockReset() - vi.mocked(getOptionalApiTokenUserId).mockResolvedValue(null) - vi.mocked(requireApiTokenUser).mockReset() - vi.mocked(publishVersionForUser).mockReset() -}) - -describe('httpApiV1 handlers', () => { - it('search returns empty results for blank query', async () => { - const runAction = vi.fn() - const runMutation = vi.fn().mockResolvedValue(okRate()) + vi.mocked(getOptionalApiTokenUserId).mockReset(); + vi.mocked(getOptionalApiTokenUserId).mockResolvedValue(null); + vi.mocked(requireApiTokenUser).mockReset(); + vi.mocked(publishVersionForUser).mockReset(); +}); + +describe("httpApiV1 handlers", () => { + it("search returns empty results for blank query", async () => { + const 
runAction = vi.fn(); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.searchSkillsV1Handler( makeCtx({ runAction, runMutation }), - new Request('https://example.com/api/v1/search?q=%20%20'), - ) + new Request("https://example.com/api/v1/search?q=%20%20"), + ); if (response.status !== 200) { - throw new Error(await response.text()) + throw new Error(await response.text()); } - expect(await response.json()).toEqual({ results: [] }) - expect(runAction).not.toHaveBeenCalled() - }) - - it('users/restore forbids non-admin api tokens', async () => { - const runQuery = vi.fn() - const runAction = vi.fn() - const runMutation = vi.fn().mockResolvedValue(okRate()) + expect(await response.json()).toEqual({ results: [] }); + expect(runAction).not.toHaveBeenCalled(); + }); + + it("users/restore forbids non-admin api tokens", async () => { + const runQuery = vi.fn(); + const runAction = vi.fn(); + const runMutation = vi.fn().mockResolvedValue(okRate()); vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:actor', - user: { _id: 'users:actor', role: 'user' }, - } as never) + userId: "users:actor", + user: { _id: "users:actor", role: "user" }, + } as never); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runAction, runMutation }), - new Request('https://example.com/api/v1/users/restore', { - method: 'POST', - body: JSON.stringify({ handle: 'target', slugs: ['a'] }), + new Request("https://example.com/api/v1/users/restore", { + method: "POST", + body: JSON.stringify({ handle: "target", slugs: ["a"] }), }), - ) - expect(response.status).toBe(403) - expect(runQuery).not.toHaveBeenCalled() - expect(runAction).not.toHaveBeenCalled() - }) - - it('users/restore calls restore action for admin', async () => { - const runAction = vi.fn().mockResolvedValue({ ok: true, totalRestored: 1, results: [] }) + ); + expect(response.status).toBe(403); + expect(runQuery).not.toHaveBeenCalled(); + 
expect(runAction).not.toHaveBeenCalled(); + }); + + it("users/restore calls restore action for admin", async () => { + const runAction = vi.fn().mockResolvedValue({ ok: true, totalRestored: 1, results: [] }); const runMutation = vi.fn(async (_mutation: unknown, args: Record) => { - if (isRateLimitArgs(args)) return okRate() - return { ok: true } - }) + if (isRateLimitArgs(args)) return okRate(); + return { ok: true }; + }); const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('handle' in args) return { _id: 'users:target' } - return null - }) + if ("handle" in args) return { _id: "users:target" }; + return null; + }); vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:admin', - user: { _id: 'users:admin', role: 'admin' }, - } as never) + userId: "users:admin", + user: { _id: "users:admin", role: "admin" }, + } as never); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runAction, runMutation }), - new Request('https://example.com/api/v1/users/restore', { - method: 'POST', + new Request("https://example.com/api/v1/users/restore", { + method: "POST", body: JSON.stringify({ - handle: 'Target', - slugs: ['a', 'b'], + handle: "Target", + slugs: ["a", "b"], forceOverwriteSquatter: true, }), }), - ) - if (response.status !== 200) throw new Error(await response.text()) + ); + if (response.status !== 200) throw new Error(await response.text()); expect(runAction).toHaveBeenCalledWith(expect.anything(), { - actorUserId: 'users:admin', - ownerHandle: 'target', - ownerUserId: 'users:target', - slugs: ['a', 'b'], + actorUserId: "users:admin", + ownerHandle: "target", + ownerUserId: "users:target", + slugs: ["a", "b"], forceOverwriteSquatter: true, - }) - }) + }); + }); - it('users/reclaim forbids non-admin api tokens', async () => { - const runQuery = vi.fn() - const runAction = vi.fn() - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("users/reclaim forbids non-admin api tokens", async () => { + 
const runQuery = vi.fn(); + const runAction = vi.fn(); + const runMutation = vi.fn().mockResolvedValue(okRate()); vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:actor', - user: { _id: 'users:actor', role: 'user' }, - } as never) + userId: "users:actor", + user: { _id: "users:actor", role: "user" }, + } as never); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runAction, runMutation }), - new Request('https://example.com/api/v1/users/reclaim', { - method: 'POST', - body: JSON.stringify({ handle: 'target', slugs: ['a'] }), + new Request("https://example.com/api/v1/users/reclaim", { + method: "POST", + body: JSON.stringify({ handle: "target", slugs: ["a"] }), }), - ) - expect(response.status).toBe(403) - expect(runQuery).not.toHaveBeenCalled() - }) + ); + expect(response.status).toBe(403); + expect(runQuery).not.toHaveBeenCalled(); + }); - it('users/reclaim calls reclaim mutation for admin', async () => { + it("users/reclaim calls reclaim mutation for admin", async () => { const runMutation = vi.fn(async (_mutation: unknown, args: Record) => { - if (isRateLimitArgs(args)) return okRate() - return { ok: true } - }) + if (isRateLimitArgs(args)) return okRate(); + return { ok: true }; + }); const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('handle' in args) return { _id: 'users:target' } - return null - }) + if ("handle" in args) return { _id: "users:target" }; + return null; + }); vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:admin', - user: { _id: 'users:admin', role: 'admin' }, - } as never) + userId: "users:admin", + user: { _id: "users:admin", role: "admin" }, + } as never); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runAction: vi.fn(), runMutation }), - new Request('https://example.com/api/v1/users/reclaim', { - method: 'POST', - body: JSON.stringify({ handle: 'Target', slugs: [' A ', 'b'], reason: 'r' }), + new 
Request("https://example.com/api/v1/users/reclaim", { + method: "POST", + body: JSON.stringify({ handle: "Target", slugs: [" A ", "b"], reason: "r" }), }), - ) - if (response.status !== 200) throw new Error(await response.text()) + ); + if (response.status !== 200) throw new Error(await response.text()); - const reclaimCalls = runMutation.mock.calls.filter(([, args]) => hasSlugArgs(args)) - expect(reclaimCalls).toHaveLength(2) + const reclaimCalls = runMutation.mock.calls.filter(([, args]) => hasSlugArgs(args)); + expect(reclaimCalls).toHaveLength(2); expect(reclaimCalls[0]?.[1]).toMatchObject({ - actorUserId: 'users:admin', - slug: 'a', - rightfulOwnerUserId: 'users:target', - reason: 'r', - }) + actorUserId: "users:admin", + slug: "a", + rightfulOwnerUserId: "users:target", + reason: "r", + }); expect(reclaimCalls[1]?.[1]).toMatchObject({ - actorUserId: 'users:admin', - slug: 'b', - rightfulOwnerUserId: 'users:target', - reason: 'r', - }) - }) - - it('search forwards limit and highlightedOnly', async () => { + actorUserId: "users:admin", + slug: "b", + rightfulOwnerUserId: "users:target", + reason: "r", + }); + }); + + it("search forwards limit and highlightedOnly", async () => { const runAction = vi.fn().mockResolvedValue([ { score: 1, - skill: { slug: 'a', displayName: 'A', summary: null, updatedAt: 1 }, - version: { version: '1.0.0' }, + skill: { slug: "a", displayName: "A", summary: null, updatedAt: 1 }, + version: { version: "1.0.0" }, }, - ]) - const runMutation = vi.fn().mockResolvedValue(okRate()) + ]); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.searchSkillsV1Handler( makeCtx({ runAction, runMutation }), - new Request('https://example.com/api/v1/search?q=test&limit=5&highlightedOnly=true'), - ) + new Request("https://example.com/api/v1/search?q=test&limit=5&highlightedOnly=true"), + ); if (response.status !== 200) { - throw new Error(await response.text()) + throw new Error(await response.text()); } 
expect(runAction).toHaveBeenCalledWith(expect.anything(), { - query: 'test', + query: "test", limit: 5, highlightedOnly: true, - }) - }) + }); + }); - it('search rate limits', async () => { - const runMutation = vi.fn().mockResolvedValue(blockedRate()) + it("search rate limits", async () => { + const runMutation = vi.fn().mockResolvedValue(blockedRate()); const response = await __handlers.searchSkillsV1Handler( makeCtx({ runAction: vi.fn(), runMutation }), - new Request('https://example.com/api/v1/search?q=test'), - ) - expect(response.status).toBe(429) - }) + new Request("https://example.com/api/v1/search?q=test"), + ); + expect(response.status).toBe(429); + }); - it('resolve validates hash', async () => { - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("resolve validates hash", async () => { + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.resolveSkillVersionV1Handler( makeCtx({ runQuery: vi.fn(), runMutation }), - new Request('https://example.com/api/v1/resolve?slug=demo&hash=bad'), - ) - expect(response.status).toBe(400) - }) - - it('resolve returns 404 when missing', async () => { - const runQuery = vi.fn().mockResolvedValue(null) - const runMutation = vi.fn().mockResolvedValue(okRate()) + new Request("https://example.com/api/v1/resolve?slug=demo&hash=bad"), + ); + expect(response.status).toBe(400); + }); + + it("resolve returns 404 when missing", async () => { + const runQuery = vi.fn().mockResolvedValue(null); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.resolveSkillVersionV1Handler( makeCtx({ runQuery, runMutation }), new Request( - 'https://example.com/api/v1/resolve?slug=demo&hash=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + "https://example.com/api/v1/resolve?slug=demo&hash=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", ), - ) - expect(response.status).toBe(404) - }) + ); + 
expect(response.status).toBe(404); + }); - it('resolve returns match and latestVersion', async () => { + it("resolve returns match and latestVersion", async () => { const runQuery = vi.fn().mockResolvedValue({ - match: { version: '1.0.0' }, - latestVersion: { version: '2.0.0' }, - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + match: { version: "1.0.0" }, + latestVersion: { version: "2.0.0" }, + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.resolveSkillVersionV1Handler( makeCtx({ runQuery, runMutation }), new Request( - 'https://example.com/api/v1/resolve?slug=demo&hash=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', + "https://example.com/api/v1/resolve?slug=demo&hash=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", ), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.match.version).toBe('1.0.0') - }) + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.match.version).toBe("1.0.0"); + }); - it('lists skills with resolved tags using batch query', async () => { + it("lists skills with resolved tags using batch query", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('cursor' in args || 'limit' in args) { + if ("cursor" in args || "limit" in args) { return { items: [ { skill: { - _id: 'skills:1', - slug: 'demo', - displayName: 'Demo', - summary: 's', - tags: { latest: 'versions:1' }, + _id: "skills:1", + slug: "demo", + displayName: "Demo", + summary: "s", + tags: { latest: "versions:1" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '1.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "1.0.0", createdAt: 3, changelog: "c" }, }, ], nextCursor: null, - } + }; } // Batch query: versionIds (plural) - if ('versionIds' in args) { - return [{ _id: 'versions:1', 
version: '1.0.0', softDeletedAt: undefined }] + if ("versionIds" in args) { + return [{ _id: "versions:1", version: "1.0.0", softDeletedAt: undefined }]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.listSkillsV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills?limit=1'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.items[0].tags.latest).toBe('1.0.0') - }) - - it('batches tag resolution across multiple skills into single query', async () => { + new Request("https://example.com/api/v1/skills?limit=1"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.items[0].tags.latest).toBe("1.0.0"); + }); + + it("batches tag resolution across multiple skills into single query", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('cursor' in args || 'limit' in args) { + if ("cursor" in args || "limit" in args) { return { items: [ { skill: { - _id: 'skills:1', - slug: 'skill-a', - displayName: 'Skill A', - summary: 's', - tags: { latest: 'versions:1', stable: 'versions:2' }, + _id: "skills:1", + slug: "skill-a", + displayName: "Skill A", + summary: "s", + tags: { latest: "versions:1", stable: "versions:2" }, stats: { downloads: 0, stars: 0, versions: 2, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '2.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "2.0.0", createdAt: 3, changelog: "c" }, }, { skill: { - _id: 'skills:2', - slug: 'skill-b', - displayName: 'Skill B', - summary: 's', - tags: { latest: 'versions:3' }, + _id: "skills:2", + slug: "skill-b", + displayName: "Skill B", + summary: "s", + tags: { latest: "versions:3" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, - 
latestVersion: { version: '1.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "1.0.0", createdAt: 3, changelog: "c" }, }, ], nextCursor: null, - } + }; } // Batch query should receive all version IDs from all skills - if ('versionIds' in args) { - const ids = args.versionIds as string[] - expect(ids).toHaveLength(3) - expect(ids).toContain('versions:1') - expect(ids).toContain('versions:2') - expect(ids).toContain('versions:3') + if ("versionIds" in args) { + const ids = args.versionIds as string[]; + expect(ids).toHaveLength(3); + expect(ids).toContain("versions:1"); + expect(ids).toContain("versions:2"); + expect(ids).toContain("versions:3"); return [ - { _id: 'versions:1', version: '2.0.0', softDeletedAt: undefined }, - { _id: 'versions:2', version: '1.0.0', softDeletedAt: undefined }, - { _id: 'versions:3', version: '1.0.0', softDeletedAt: undefined }, - ] + { _id: "versions:1", version: "2.0.0", softDeletedAt: undefined }, + { _id: "versions:2", version: "1.0.0", softDeletedAt: undefined }, + { _id: "versions:3", version: "1.0.0", softDeletedAt: undefined }, + ]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.listSkillsV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills'), - ) - expect(response.status).toBe(200) - const json = await response.json() + new Request("https://example.com/api/v1/skills"), + ); + expect(response.status).toBe(200); + const json = await response.json(); // Verify tags are correctly resolved for each skill - expect(json.items[0].tags.latest).toBe('2.0.0') - expect(json.items[0].tags.stable).toBe('1.0.0') - expect(json.items[1].tags.latest).toBe('1.0.0') + expect(json.items[0].tags.latest).toBe("2.0.0"); + expect(json.items[0].tags.stable).toBe("1.0.0"); + expect(json.items[1].tags.latest).toBe("1.0.0"); // Verify batch query was 
called exactly once (not per-tag) const batchCalls = runQuery.mock.calls.filter( - ([, args]) => args && 'versionIds' in (args as Record), - ) - expect(batchCalls).toHaveLength(1) - }) + ([, args]) => args && "versionIds" in (args as Record), + ); + expect(batchCalls).toHaveLength(1); + }); - it('lists souls with resolved tags using batch query', async () => { + it("lists souls with resolved tags using batch query", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('cursor' in args || 'limit' in args) { + if ("cursor" in args || "limit" in args) { return { items: [ { soul: { - _id: 'souls:1', - slug: 'demo-soul', - displayName: 'Demo Soul', - summary: 's', - tags: { latest: 'soulVersions:1' }, + _id: "souls:1", + slug: "demo-soul", + displayName: "Demo Soul", + summary: "s", + tags: { latest: "soulVersions:1" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '1.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "1.0.0", createdAt: 3, changelog: "c" }, }, ], nextCursor: null, - } + }; } - if ('versionIds' in args) { - return [{ _id: 'soulVersions:1', version: '1.0.0', softDeletedAt: undefined }] + if ("versionIds" in args) { + return [{ _id: "soulVersions:1", version: "1.0.0", softDeletedAt: undefined }]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.listSoulsV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/souls?limit=1'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.items[0].tags.latest).toBe('1.0.0') - }) - - it('batches tag resolution across multiple souls into single query', async () => { + new Request("https://example.com/api/v1/souls?limit=1"), + ); + expect(response.status).toBe(200); + const json = await 
response.json(); + expect(json.items[0].tags.latest).toBe("1.0.0"); + }); + + it("batches tag resolution across multiple souls into single query", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('cursor' in args || 'limit' in args) { + if ("cursor" in args || "limit" in args) { return { items: [ { soul: { - _id: 'souls:1', - slug: 'soul-a', - displayName: 'Soul A', - summary: 's', - tags: { latest: 'soulVersions:1', stable: 'soulVersions:2' }, + _id: "souls:1", + slug: "soul-a", + displayName: "Soul A", + summary: "s", + tags: { latest: "soulVersions:1", stable: "soulVersions:2" }, stats: { downloads: 0, stars: 0, versions: 2, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '2.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "2.0.0", createdAt: 3, changelog: "c" }, }, { soul: { - _id: 'souls:2', - slug: 'soul-b', - displayName: 'Soul B', - summary: 's', - tags: { latest: 'soulVersions:3' }, + _id: "souls:2", + slug: "soul-b", + displayName: "Soul B", + summary: "s", + tags: { latest: "soulVersions:3" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '1.0.0', createdAt: 3, changelog: 'c' }, + latestVersion: { version: "1.0.0", createdAt: 3, changelog: "c" }, }, ], nextCursor: null, - } + }; } - if ('versionIds' in args) { - const ids = args.versionIds as string[] - expect(ids).toHaveLength(3) - expect(ids).toContain('soulVersions:1') - expect(ids).toContain('soulVersions:2') - expect(ids).toContain('soulVersions:3') + if ("versionIds" in args) { + const ids = args.versionIds as string[]; + expect(ids).toHaveLength(3); + expect(ids).toContain("soulVersions:1"); + expect(ids).toContain("soulVersions:2"); + expect(ids).toContain("soulVersions:3"); return [ - { _id: 'soulVersions:1', version: '2.0.0', softDeletedAt: undefined }, - { _id: 'soulVersions:2', version: '1.0.0', softDeletedAt: undefined }, - { _id: 
'soulVersions:3', version: '1.0.0', softDeletedAt: undefined }, - ] + { _id: "soulVersions:1", version: "2.0.0", softDeletedAt: undefined }, + { _id: "soulVersions:2", version: "1.0.0", softDeletedAt: undefined }, + { _id: "soulVersions:3", version: "1.0.0", softDeletedAt: undefined }, + ]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.listSoulsV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/souls'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.items[0].tags.latest).toBe('2.0.0') - expect(json.items[0].tags.stable).toBe('1.0.0') - expect(json.items[1].tags.latest).toBe('1.0.0') + new Request("https://example.com/api/v1/souls"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.items[0].tags.latest).toBe("2.0.0"); + expect(json.items[0].tags.stable).toBe("1.0.0"); + expect(json.items[1].tags.latest).toBe("1.0.0"); const batchCalls = runQuery.mock.calls.filter( - ([, args]) => args && 'versionIds' in (args as Record), - ) - expect(batchCalls).toHaveLength(1) - }) + ([, args]) => args && "versionIds" in (args as Record), + ); + expect(batchCalls).toHaveLength(1); + }); - it('souls get resolves tags using batch query', async () => { + it("souls get resolves tags using batch query", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { + if ("slug" in args) { return { soul: { - _id: 'souls:1', - slug: 'demo-soul', - displayName: 'Demo Soul', - summary: 's', - tags: { latest: 'soulVersions:1' }, + _id: "souls:1", + slug: "demo-soul", + displayName: "Demo Soul", + summary: "s", + tags: { latest: "soulVersions:1" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, - latestVersion: { version: '1.0.0', 
createdAt: 3, changelog: 'c' }, + latestVersion: { version: "1.0.0", createdAt: 3, changelog: "c" }, owner: null, - } + }; } - if ('versionIds' in args) { - return [{ _id: 'soulVersions:1', version: '1.0.0', softDeletedAt: undefined }] + if ("versionIds" in args) { + return [{ _id: "soulVersions:1", version: "1.0.0", softDeletedAt: undefined }]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.soulsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/souls/demo-soul'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.soul.tags.latest).toBe('1.0.0') - }) - - it('lists skills supports sort aliases', async () => { + new Request("https://example.com/api/v1/souls/demo-soul"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.soul.tags.latest).toBe("1.0.0"); + }); + + it("lists skills supports sort aliases", async () => { const checks: Array<[string, string]> = [ - ['rating', 'stars'], - ['installs', 'installsCurrent'], - ['installs-all-time', 'installsAllTime'], - ['trending', 'trending'], - ] + ["rating", "stars"], + ["installs", "installsCurrent"], + ["installs-all-time", "installsAllTime"], + ["trending", "trending"], + ]; for (const [input, expected] of checks) { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('sort' in args || 'cursor' in args || 'limit' in args) { - expect(args.sort).toBe(expected) - return { items: [], nextCursor: null } + if ("sort" in args || "cursor" in args || "limit" in args) { + expect(args.sort).toBe(expected); + return { items: [], nextCursor: null }; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await 
__handlers.listSkillsV1Handler( makeCtx({ runQuery, runMutation }), new Request(`https://example.com/api/v1/skills?sort=${input}`), - ) - expect(response.status).toBe(200) + ); + expect(response.status).toBe(200); } - }) + }); - it('get skill returns 404 when missing', async () => { - const runQuery = vi.fn().mockResolvedValue(null) - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("get skill returns 404 when missing", async () => { + const runQuery = vi.fn().mockResolvedValue(null); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/missing'), - ) - expect(response.status).toBe(404) - }) + new Request("https://example.com/api/v1/skills/missing"), + ); + expect(response.status).toBe(404); + }); - it('get skill returns pending-scan message for owner api token', async () => { - vi.mocked(getOptionalApiTokenUserId).mockResolvedValue('users:1' as never) + it("get skill returns pending-scan message for owner api token", async () => { + vi.mocked(getOptionalApiTokenUserId).mockResolvedValue("users:1" as never); const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { + if ("slug" in args) { return { - _id: 'skills:1', - slug: 'demo', - ownerUserId: 'users:1', - moderationStatus: 'hidden', - moderationReason: 'pending.scan', - } + _id: "skills:1", + slug: "demo", + ownerUserId: "users:1", + moderationStatus: "hidden", + moderationReason: "pending.scan", + }; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/demo'), - ) - expect(response.status).toBe(423) - expect(await response.text()).toContain('security scan is pending') - }) 
- - it('get skill returns undelete hint for owner soft-deleted skill', async () => { - vi.mocked(getOptionalApiTokenUserId).mockResolvedValue('users:1' as never) + new Request("https://example.com/api/v1/skills/demo"), + ); + expect(response.status).toBe(423); + expect(await response.text()).toContain("security scan is pending"); + }); + + it("get skill returns undelete hint for owner soft-deleted skill", async () => { + vi.mocked(getOptionalApiTokenUserId).mockResolvedValue("users:1" as never); const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { + if ("slug" in args) { return { - _id: 'skills:1', - slug: 'demo', - ownerUserId: 'users:1', + _id: "skills:1", + slug: "demo", + ownerUserId: "users:1", softDeletedAt: 1, - moderationStatus: 'hidden', - } + moderationStatus: "hidden", + }; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/demo'), - ) - expect(response.status).toBe(410) - expect(await response.text()).toContain('clawhub undelete demo') - }) + new Request("https://example.com/api/v1/skills/demo"), + ); + expect(response.status).toBe(410); + expect(await response.text()).toContain("clawhub undelete demo"); + }); - it('get skill returns payload', async () => { + it("get skill returns payload", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { + if ("slug" in args) { return { skill: { - _id: 'skills:1', - slug: 'demo', - displayName: 'Demo', - summary: 's', - tags: { latest: 'versions:1' }, + _id: "skills:1", + slug: "demo", + displayName: "Demo", + summary: "s", + ownerUserId: "users:1", + tags: { latest: "versions:1" }, stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, createdAt: 1, updatedAt: 2, }, 
latestVersion: { - version: '1.0.0', + version: "1.0.0", createdAt: 3, - changelog: 'c', + changelog: "c", files: [], }, - owner: { handle: 'p', displayName: 'Peter', image: null }, - } + owner: { handle: "p", displayName: "Peter", image: null }, + moderationInfo: { + isPendingScan: false, + isMalwareBlocked: false, + isSuspicious: true, + isHiddenByMod: false, + isRemoved: false, + verdict: "suspicious", + reasonCodes: ["suspicious.dangerous_exec"], + summary: "Detected: suspicious.dangerous_exec", + engineVersion: "v2.0.0", + updatedAt: 123, + }, + }; } // Batch query for tag resolution - if ('versionIds' in args) { - return [{ _id: 'versions:1', version: '1.0.0', softDeletedAt: undefined }] + if ("versionIds" in args) { + return [{ _id: "versions:1", version: "1.0.0", softDeletedAt: undefined }]; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/demo'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.skill.slug).toBe('demo') - expect(json.latestVersion.version).toBe('1.0.0') - }) - - it('lists versions', async () => { + new Request("https://example.com/api/v1/skills/demo"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.skill.slug).toBe("demo"); + expect(json.latestVersion.version).toBe("1.0.0"); + expect(json.moderation.verdict).toBe("suspicious"); + expect(json.moderation.reasonCodes).toEqual(["suspicious.dangerous_exec"]); + }); + + it("get skill moderation returns sanitized evidence for public callers", async () => { + vi.mocked(getOptionalApiTokenUserId).mockResolvedValue(undefined as never); const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { - return { _id: 'skills:1', slug: 
'demo', displayName: 'Demo' } + if ("slug" in args) { + return { + skill: { + _id: "skills:1", + slug: "demo", + displayName: "Demo", + summary: "s", + ownerUserId: "users:1", + tags: { latest: "versions:1" }, + stats: { downloads: 0, stars: 0, versions: 1, comments: 0 }, + moderationEvidence: [ + { + code: "suspicious.dangerous_exec", + severity: "critical", + file: "index.ts", + line: 12, + message: "Shell command execution detected.", + evidence: 'exec("curl ...")', + }, + ], + createdAt: 1, + updatedAt: 2, + }, + latestVersion: null, + owner: { _id: "users:1", handle: "p", displayName: "Peter", image: null }, + moderationInfo: { + isPendingScan: false, + isMalwareBlocked: false, + isSuspicious: true, + isHiddenByMod: false, + isRemoved: false, + verdict: "suspicious", + reasonCodes: ["suspicious.dangerous_exec"], + summary: "Detected: suspicious.dangerous_exec", + engineVersion: "v2.0.0", + updatedAt: 123, + }, + }; } - if ('skillId' in args && 'cursor' in args) { + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); + const response = await __handlers.skillsGetRouterV1Handler( + makeCtx({ runQuery, runMutation }), + new Request("https://example.com/api/v1/skills/demo/moderation"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.moderation.reasonCodes).toEqual(["suspicious.dangerous_exec"]); + expect(json.moderation.evidence[0].evidence).toBe(""); + }); + + it("lists versions", async () => { + const runQuery = vi.fn(async (_query: unknown, args: Record) => { + if ("slug" in args) { + return { _id: "skills:1", slug: "demo", displayName: "Demo" }; + } + if ("skillId" in args && "cursor" in args) { return { items: [ { - version: '1.0.0', + version: "1.0.0", createdAt: 1, - changelog: 'c', - changelogSource: 'user', + changelog: "c", + changelogSource: "user", files: [], }, ], nextCursor: null, - } + }; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return 
null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/demo/versions?limit=1'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.items[0].version).toBe('1.0.0') - }) - - it('returns version detail', async () => { + new Request("https://example.com/api/v1/skills/demo/versions?limit=1"), + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.items[0].version).toBe("1.0.0"); + }); + + it("returns version detail", async () => { const runQuery = vi.fn(async (_query: unknown, args: Record) => { - if ('slug' in args) { - return { _id: 'skills:1', slug: 'demo', displayName: 'Demo' } + if ("slug" in args) { + return { _id: "skills:1", slug: "demo", displayName: "Demo" }; } - if ('skillId' in args && 'version' in args) { + if ("skillId" in args && "version" in args) { return { - version: '1.0.0', + version: "1.0.0", createdAt: 1, - changelog: 'c', - changelogSource: 'auto', + changelog: "c", + changelogSource: "auto", files: [ { - path: 'SKILL.md', + path: "SKILL.md", size: 1, - storageId: 'storage:1', - sha256: 'abc', - contentType: 'text/plain', + storageId: "storage:1", + sha256: "abc", + contentType: "text/plain", }, ], - } + }; } - return null - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + return null; + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/skills/demo/versions/1.0.0'), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.version.files[0].path).toBe('SKILL.md') - }) - - it('returns raw file content', async () => { + new Request("https://example.com/api/v1/skills/demo/versions/1.0.0"), + ); + 
expect(response.status).toBe(200); + const json = await response.json(); + expect(json.version.files[0].path).toBe("SKILL.md"); + }); + + it("returns raw file content", async () => { const version = { - version: '1.0.0', + version: "1.0.0", createdAt: 1, - changelog: 'c', + changelog: "c", files: [ { - path: 'SKILL.md', + path: "SKILL.md", size: 5, - storageId: 'storage:1', - sha256: 'abcd', - contentType: 'text/plain', + storageId: "storage:1", + sha256: "abcd", + contentType: "text/plain", }, ], softDeletedAt: undefined, - } + }; const runQuery = vi.fn().mockResolvedValue({ skill: { - _id: 'skills:1', - slug: 'demo', - displayName: 'Demo', - summary: 's', + _id: "skills:1", + slug: "demo", + displayName: "Demo", + summary: "s", tags: {}, stats: {}, createdAt: 1, @@ -729,42 +799,42 @@ describe('httpApiV1 handlers', () => { }, latestVersion: version, owner: null, - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const storage = { - get: vi.fn().mockResolvedValue(new Blob(['hello'], { type: 'text/plain' })), - } + get: vi.fn().mockResolvedValue(new Blob(["hello"], { type: "text/plain" })), + }; const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation, storage }), - new Request('https://example.com/api/v1/skills/demo/file?path=SKILL.md'), - ) - expect(response.status).toBe(200) - expect(await response.text()).toBe('hello') - expect(response.headers.get('X-Content-SHA256')).toBe('abcd') - }) - - it('returns 413 when raw file too large', async () => { + new Request("https://example.com/api/v1/skills/demo/file?path=SKILL.md"), + ); + expect(response.status).toBe(200); + expect(await response.text()).toBe("hello"); + expect(response.headers.get("X-Content-SHA256")).toBe("abcd"); + }); + + it("returns 413 when raw file too large", async () => { const version = { - version: '1.0.0', + version: "1.0.0", createdAt: 1, - changelog: 'c', + changelog: "c", files: [ { 
- path: 'SKILL.md', + path: "SKILL.md", size: 210 * 1024, - storageId: 'storage:1', - sha256: 'abcd', - contentType: 'text/plain', + storageId: "storage:1", + sha256: "abcd", + contentType: "text/plain", }, ], softDeletedAt: undefined, - } + }; const runQuery = vi.fn().mockResolvedValue({ skill: { - _id: 'skills:1', - slug: 'demo', - displayName: 'Demo', - summary: 's', + _id: "skills:1", + slug: "demo", + displayName: "Demo", + summary: "s", tags: {}, stats: {}, createdAt: 1, @@ -772,377 +842,377 @@ describe('httpApiV1 handlers', () => { }, latestVersion: version, owner: null, - }) - const runMutation = vi.fn().mockResolvedValue(okRate()) + }); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsGetRouterV1Handler( makeCtx({ runQuery, runMutation, storage: { get: vi.fn() } }), - new Request('https://example.com/api/v1/skills/demo/file?path=SKILL.md'), - ) - expect(response.status).toBe(413) - }) + new Request("https://example.com/api/v1/skills/demo/file?path=SKILL.md"), + ); + expect(response.status).toBe(413); + }); - it('publish json succeeds', async () => { + it("publish json succeeds", async () => { vi.mocked(requireApiTokenUser).mockResolvedValueOnce({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); vi.mocked(publishVersionForUser).mockResolvedValueOnce({ - skillId: 's', - versionId: 'v', - embeddingId: 'e', - } as never) - const runMutation = vi.fn().mockResolvedValue(okRate()) + skillId: "s", + versionId: "v", + embeddingId: "e", + } as never); + const runMutation = vi.fn().mockResolvedValue(okRate()); const body = JSON.stringify({ - slug: 'demo', - displayName: 'Demo', - version: '1.0.0', - changelog: 'c', + slug: "demo", + displayName: "Demo", + version: "1.0.0", + changelog: "c", files: [ { - path: 'SKILL.md', + path: "SKILL.md", size: 1, - storageId: 'storage:1', - sha256: 'abc', - contentType: 'text/plain', + storageId: "storage:1", + 
sha256: "abc", + contentType: "text/plain", }, ], - }) + }); const response = await __handlers.publishSkillV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills', { - method: 'POST', - headers: { 'Content-Type': 'application/json', Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/skills", { + method: "POST", + headers: { "Content-Type": "application/json", Authorization: "Bearer clh_test" }, body, }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.ok).toBe(true) - expect(publishVersionForUser).toHaveBeenCalled() - }) - - it('publish multipart succeeds', async () => { + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.ok).toBe(true); + expect(publishVersionForUser).toHaveBeenCalled(); + }); + + it("publish multipart succeeds", async () => { vi.mocked(requireApiTokenUser).mockResolvedValueOnce({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); vi.mocked(publishVersionForUser).mockResolvedValueOnce({ - skillId: 's', - versionId: 'v', - embeddingId: 'e', - } as never) - const runMutation = vi.fn().mockResolvedValue(okRate()) - const form = new FormData() + skillId: "s", + versionId: "v", + embeddingId: "e", + } as never); + const runMutation = vi.fn().mockResolvedValue(okRate()); + const form = new FormData(); form.set( - 'payload', + "payload", JSON.stringify({ - slug: 'demo', - displayName: 'Demo', - version: '1.0.0', - changelog: '', - tags: ['latest'], + slug: "demo", + displayName: "Demo", + version: "1.0.0", + changelog: "", + tags: ["latest"], }), - ) - form.append('files', new Blob(['hello'], { type: 'text/plain' }), 'SKILL.md') + ); + form.append("files", new Blob(["hello"], { type: "text/plain" }), "SKILL.md"); const response = await __handlers.publishSkillV1Handler( - makeCtx({ runMutation, storage: { store: 
vi.fn().mockResolvedValue('storage:1') } }), - new Request('https://example.com/api/v1/skills', { - method: 'POST', - headers: { Authorization: 'Bearer clh_test' }, + makeCtx({ runMutation, storage: { store: vi.fn().mockResolvedValue("storage:1") } }), + new Request("https://example.com/api/v1/skills", { + method: "POST", + headers: { Authorization: "Bearer clh_test" }, body: form, }), - ) + ); if (response.status !== 200) { - throw new Error(await response.text()) + throw new Error(await response.text()); } - }) + }); - it('publish rejects missing token', async () => { - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("publish rejects missing token", async () => { + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.publishSkillV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills', { method: 'POST' }), - ) - expect(response.status).toBe(401) - }) + new Request("https://example.com/api/v1/skills", { method: "POST" }), + ); + expect(response.status).toBe(401); + }); - it('whoami returns user payload', async () => { + it("whoami returns user payload", async () => { vi.mocked(requireApiTokenUser).mockResolvedValueOnce({ - userId: 'users:1', - user: { handle: 'p', displayName: 'Peter', image: null }, - } as never) - const runMutation = vi.fn().mockResolvedValue(okRate()) + userId: "users:1", + user: { handle: "p", displayName: "Peter", image: null }, + } as never); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.whoamiV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/whoami', { - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/whoami", { + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.user.handle).toBe('p') - }) - - it('delete and undelete require auth', async () => { - 
vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error('Unauthorized')) - const runMutation = vi.fn().mockResolvedValue(okRate()) + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.user.handle).toBe("p"); + }); + + it("delete and undelete require auth", async () => { + vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error("Unauthorized")); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.skillsDeleteRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills/demo', { method: 'DELETE' }), - ) - expect(response.status).toBe(401) + new Request("https://example.com/api/v1/skills/demo", { method: "DELETE" }), + ); + expect(response.status).toBe(401); - vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error('Unauthorized')) + vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error("Unauthorized")); const response2 = await __handlers.skillsPostRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills/demo/undelete', { method: 'POST' }), - ) - expect(response2.status).toBe(401) - }) + new Request("https://example.com/api/v1/skills/demo/undelete", { method: "POST" }), + ); + expect(response2.status).toBe(401); + }); - it('delete and undelete succeed', async () => { + it("delete and undelete succeed", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); const runMutation = vi.fn(async (_query: unknown, args: Record) => { - if ('key' in args) return okRate() - return { ok: true } - }) + if ("key" in args) return okRate(); + return { ok: true }; + }); const response = await __handlers.skillsDeleteRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills/demo', { - method: 'DELETE', - headers: { Authorization: 'Bearer clh_test' }, + 
new Request("https://example.com/api/v1/skills/demo", { + method: "DELETE", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(response.status).toBe(200) + ); + expect(response.status).toBe(200); const response2 = await __handlers.skillsPostRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/skills/demo/undelete', { - method: 'POST', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/skills/demo/undelete", { + method: "POST", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(response2.status).toBe(200) - }) + ); + expect(response2.status).toBe(200); + }); - it('ban user requires auth', async () => { - vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error('Unauthorized')) - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("ban user requires auth", async () => { + vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error("Unauthorized")); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/users/ban', { - method: 'POST', - headers: { 'content-type': 'application/json' }, - body: JSON.stringify({ handle: 'demo' }), + new Request("https://example.com/api/v1/users/ban", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ handle: "demo" }), }), - ) - expect(response.status).toBe(401) - }) + ); + expect(response.status).toBe(401); + }); - it('ban user succeeds with handle', async () => { + it("ban user succeeds with handle", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) - const runQuery = vi.fn().mockResolvedValue({ _id: 'users:2' }) + userId: "users:1", + user: { handle: "p" }, + } as never); + const runQuery = vi.fn().mockResolvedValue({ _id: "users:2" }); const runMutation = vi .fn() 
.mockResolvedValueOnce(okRate()) - .mockResolvedValueOnce({ ok: true, alreadyBanned: false, deletedSkills: 2 }) + .mockResolvedValueOnce({ ok: true, alreadyBanned: false, deletedSkills: 2 }); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/users/ban', { - method: 'POST', - headers: { 'content-type': 'application/json' }, - body: JSON.stringify({ handle: 'demo' }), + new Request("https://example.com/api/v1/users/ban", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ handle: "demo" }), }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.deletedSkills).toBe(2) - }) + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.deletedSkills).toBe(2); + }); - it('ban user forwards reason', async () => { + it("ban user forwards reason", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) - const runQuery = vi.fn().mockResolvedValue({ _id: 'users:2' }) + userId: "users:1", + user: { handle: "p" }, + } as never); + const runQuery = vi.fn().mockResolvedValue({ _id: "users:2" }); const runMutation = vi .fn() .mockResolvedValueOnce(okRate()) - .mockResolvedValueOnce({ ok: true, alreadyBanned: false, deletedSkills: 0 }) + .mockResolvedValueOnce({ ok: true, alreadyBanned: false, deletedSkills: 0 }); await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/users/ban', { - method: 'POST', - headers: { 'content-type': 'application/json' }, - body: JSON.stringify({ handle: 'demo', reason: 'malware' }), + new Request("https://example.com/api/v1/users/ban", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ handle: "demo", reason: "malware" }), }), - ) + ); 
expect(runMutation).toHaveBeenCalledWith( expect.anything(), expect.objectContaining({ - actorUserId: 'users:1', - targetUserId: 'users:2', - reason: 'malware', + actorUserId: "users:1", + targetUserId: "users:2", + reason: "malware", }), - ) - }) + ); + }); - it('set role requires auth', async () => { - vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error('Unauthorized')) - const runMutation = vi.fn().mockResolvedValue(okRate()) + it("set role requires auth", async () => { + vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error("Unauthorized")); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/users/role', { - method: 'POST', - headers: { 'content-type': 'application/json' }, - body: JSON.stringify({ handle: 'demo', role: 'moderator' }), + new Request("https://example.com/api/v1/users/role", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ handle: "demo", role: "moderator" }), }), - ) - expect(response.status).toBe(401) - }) + ); + expect(response.status).toBe(401); + }); - it('set role succeeds with handle', async () => { + it("set role succeeds with handle", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) - const runQuery = vi.fn().mockResolvedValue({ _id: 'users:2' }) + userId: "users:1", + user: { handle: "p" }, + } as never); + const runQuery = vi.fn().mockResolvedValue({ _id: "users:2" }); const runMutation = vi .fn() .mockResolvedValueOnce(okRate()) - .mockResolvedValueOnce({ ok: true, role: 'moderator' }) + .mockResolvedValueOnce({ ok: true, role: "moderator" }); const response = await __handlers.usersPostRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/users/role', { - method: 'POST', - headers: { 'content-type': 'application/json' }, 
- body: JSON.stringify({ handle: 'demo', role: 'moderator' }), + new Request("https://example.com/api/v1/users/role", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ handle: "demo", role: "moderator" }), }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.role).toBe('moderator') - }) - - it('stars require auth', async () => { - vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error('Unauthorized')) - const runMutation = vi.fn().mockResolvedValue(okRate()) + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.role).toBe("moderator"); + }); + + it("stars require auth", async () => { + vi.mocked(requireApiTokenUser).mockRejectedValueOnce(new Error("Unauthorized")); + const runMutation = vi.fn().mockResolvedValue(okRate()); const response = await __handlers.starsPostRouterV1Handler( makeCtx({ runMutation }), - new Request('https://example.com/api/v1/stars/demo', { method: 'POST' }), - ) - expect(response.status).toBe(401) - }) + new Request("https://example.com/api/v1/stars/demo", { method: "POST" }), + ); + expect(response.status).toBe(401); + }); - it('stars add succeeds', async () => { + it("stars add succeeds", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) - const runQuery = vi.fn().mockResolvedValue({ _id: 'skills:1' }) + userId: "users:1", + user: { handle: "p" }, + } as never); + const runQuery = vi.fn().mockResolvedValue({ _id: "skills:1" }); const runMutation = vi .fn() .mockResolvedValueOnce(okRate()) .mockResolvedValueOnce(okRate()) - .mockResolvedValueOnce({ ok: true, starred: true, alreadyStarred: false }) + .mockResolvedValueOnce({ ok: true, starred: true, alreadyStarred: false }); const response = await __handlers.starsPostRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/stars/demo', { - method: 
'POST', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/stars/demo", { + method: "POST", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.ok).toBe(true) - expect(json.starred).toBe(true) - }) - - it('stars delete succeeds', async () => { + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.ok).toBe(true); + expect(json.starred).toBe(true); + }); + + it("stars delete succeeds", async () => { vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) - const runQuery = vi.fn().mockResolvedValue({ _id: 'skills:1' }) + userId: "users:1", + user: { handle: "p" }, + } as never); + const runQuery = vi.fn().mockResolvedValue({ _id: "skills:1" }); const runMutation = vi .fn() .mockResolvedValueOnce(okRate()) .mockResolvedValueOnce(okRate()) - .mockResolvedValueOnce({ ok: true, unstarred: true, alreadyUnstarred: false }) + .mockResolvedValueOnce({ ok: true, unstarred: true, alreadyUnstarred: false }); const response = await __handlers.starsDeleteRouterV1Handler( makeCtx({ runQuery, runMutation }), - new Request('https://example.com/api/v1/stars/demo', { - method: 'DELETE', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/stars/demo", { + method: "DELETE", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(response.status).toBe(200) - const json = await response.json() - expect(json.ok).toBe(true) - expect(json.unstarred).toBe(true) - }) - - it('delete/undelete map forbidden/not-found/unknown to 403/404/500', async () => { + ); + expect(response.status).toBe(200); + const json = await response.json(); + expect(json.ok).toBe(true); + expect(json.unstarred).toBe(true); + }); + + it("delete/undelete map forbidden/not-found/unknown to 403/404/500", async () => { 
vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); const runMutationForbidden = vi.fn(async (_query: unknown, args: Record) => { - if ('key' in args) return okRate() - throw new Error('Forbidden') - }) + if ("key" in args) return okRate(); + throw new Error("Forbidden"); + }); const forbidden = await __handlers.skillsDeleteRouterV1Handler( makeCtx({ runMutation: runMutationForbidden }), - new Request('https://example.com/api/v1/skills/demo', { - method: 'DELETE', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/skills/demo", { + method: "DELETE", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(forbidden.status).toBe(403) - expect(await forbidden.text()).toBe('Forbidden') + ); + expect(forbidden.status).toBe(403); + expect(await forbidden.text()).toBe("Forbidden"); vi.mocked(requireApiTokenUser).mockResolvedValue({ - userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); const runMutationNotFound = vi.fn(async (_query: unknown, args: Record) => { - if ('key' in args) return okRate() - throw new Error('Skill not found') - }) + if ("key" in args) return okRate(); + throw new Error("Skill not found"); + }); const notFound = await __handlers.skillsPostRouterV1Handler( makeCtx({ runMutation: runMutationNotFound }), - new Request('https://example.com/api/v1/skills/demo/undelete', { - method: 'POST', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/skills/demo/undelete", { + method: "POST", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(notFound.status).toBe(404) - expect(await notFound.text()).toBe('Skill not found') + ); + expect(notFound.status).toBe(404); + expect(await notFound.text()).toBe("Skill not found"); vi.mocked(requireApiTokenUser).mockResolvedValue({ - 
userId: 'users:1', - user: { handle: 'p' }, - } as never) + userId: "users:1", + user: { handle: "p" }, + } as never); const runMutationUnknown = vi.fn(async (_query: unknown, args: Record) => { - if ('key' in args) return okRate() - throw new Error('boom') - }) + if ("key" in args) return okRate(); + throw new Error("boom"); + }); const unknown = await __handlers.soulsDeleteRouterV1Handler( makeCtx({ runMutation: runMutationUnknown }), - new Request('https://example.com/api/v1/souls/demo-soul', { - method: 'DELETE', - headers: { Authorization: 'Bearer clh_test' }, + new Request("https://example.com/api/v1/souls/demo-soul", { + method: "DELETE", + headers: { Authorization: "Bearer clh_test" }, }), - ) - expect(unknown.status).toBe(500) - expect(await unknown.text()).toBe('Internal Server Error') - }) -}) + ); + expect(unknown.status).toBe(500); + expect(await unknown.text()).toBe("Internal Server Error"); + }); +}); diff --git a/convex/httpApiV1/skillsV1.ts b/convex/httpApiV1/skillsV1.ts index 358233d61..e2406e8af 100644 --- a/convex/httpApiV1/skillsV1.ts +++ b/convex/httpApiV1/skillsV1.ts @@ -1,9 +1,9 @@ -import { api, internal } from '../_generated/api' -import type { Doc, Id } from '../_generated/dataModel' -import type { ActionCtx } from '../_generated/server' -import { getOptionalApiTokenUserId, requireApiTokenUser } from '../lib/apiTokenAuth' -import { applyRateLimit, parseBearerToken } from '../lib/httpRateLimit' -import { publishVersionForUser } from '../skills' +import type { Doc, Id } from "../_generated/dataModel"; +import type { ActionCtx } from "../_generated/server"; +import { api, internal } from "../_generated/api"; +import { getOptionalApiTokenUserId, requireApiTokenUser } from "../lib/apiTokenAuth"; +import { applyRateLimit, parseBearerToken } from "../lib/httpRateLimit"; +import { publishVersionForUser } from "../skills"; import { MAX_RAW_FILE_BYTES, getPathSegments, @@ -15,96 +15,102 @@ import { softDeleteErrorToResponse, text, toOptionalNumber, 
-} from './shared' +} from "./shared"; type SearchSkillEntry = { - score: number + score: number; skill: { - slug?: string - displayName?: string - summary?: string | null - updatedAt?: number - } | null - version: { version?: string; createdAt?: number } | null -} + slug?: string; + displayName?: string; + summary?: string | null; + updatedAt?: number; + } | null; + version: { version?: string; createdAt?: number } | null; +}; type ListSkillsResult = { items: Array<{ skill: { - _id: Id<'skills'> - slug: string - displayName: string - summary?: string - tags: Record> - stats: unknown - createdAt: number - updatedAt: number - latestVersionId?: Id<'skillVersions'> - } - latestVersion: { version: string; createdAt: number; changelog: string } | null - }> - nextCursor: string | null -} - -type SkillFile = Doc<'skillVersions'>['files'][number] + _id: Id<"skills">; + slug: string; + displayName: string; + summary?: string; + tags: Record>; + stats: unknown; + createdAt: number; + updatedAt: number; + latestVersionId?: Id<"skillVersions">; + }; + latestVersion: { version: string; createdAt: number; changelog: string } | null; + }>; + nextCursor: string | null; +}; + +type SkillFile = Doc<"skillVersions">["files"][number]; type GetBySlugResult = { skill: { - _id: Id<'skills'> - slug: string - displayName: string - summary?: string - tags: Record> - stats: unknown - createdAt: number - updatedAt: number - } | null - latestVersion: Doc<'skillVersions'> | null - owner: { _id: Id<'users'>; handle?: string; displayName?: string; image?: string } | null + _id: Id<"skills">; + slug: string; + displayName: string; + summary?: string; + ownerUserId: Id<"users">; + tags: Record>; + stats: unknown; + createdAt: number; + updatedAt: number; + } | null; + latestVersion: Doc<"skillVersions"> | null; + owner: { _id: Id<"users">; handle?: string; displayName?: string; image?: string } | null; moderationInfo?: { - isPendingScan: boolean - isMalwareBlocked: boolean - isSuspicious: boolean - 
isHiddenByMod: boolean - isRemoved: boolean - reason?: string - } | null -} | null + isPendingScan: boolean; + isMalwareBlocked: boolean; + isSuspicious: boolean; + isHiddenByMod: boolean; + isRemoved: boolean; + verdict?: "clean" | "suspicious" | "malicious"; + reasonCodes?: string[]; + summary?: string; + engineVersion?: string; + updatedAt?: number; + reason?: string; + } | null; +} | null; type ListVersionsResult = { items: Array<{ - version: string - createdAt: number - changelog: string - changelogSource?: 'auto' | 'user' + version: string; + createdAt: number; + changelog: string; + changelogSource?: "auto" | "user"; files: Array<{ - path: string - size: number - storageId: Id<'_storage'> - sha256: string - contentType?: string - }> - softDeletedAt?: number - }> - nextCursor: string | null -} + path: string; + size: number; + storageId: Id<"_storage">; + sha256: string; + contentType?: string; + }>; + softDeletedAt?: number; + }>; + nextCursor: string | null; +}; export async function searchSkillsV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'read') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "read"); + if (!rate.ok) return rate.response; - const url = new URL(request.url) - const query = url.searchParams.get('q')?.trim() ?? '' - const limit = toOptionalNumber(url.searchParams.get('limit')) - const highlightedOnly = url.searchParams.get('highlightedOnly') === 'true' + const url = new URL(request.url); + const query = url.searchParams.get("q")?.trim() ?? 
""; + const limit = toOptionalNumber(url.searchParams.get("limit")); + const highlightedOnly = url.searchParams.get("highlightedOnly") === "true"; - if (!query) return json({ results: [] }, 200, rate.headers) + if (!query) return json({ results: [] }, 200, rate.headers); const results = (await ctx.runAction(api.search.searchSkills, { query, limit, highlightedOnly: highlightedOnly || undefined, - })) as SearchSkillEntry[] + })) as SearchSkillEntry[]; return json( { @@ -119,73 +125,77 @@ export async function searchSkillsV1Handler(ctx: ActionCtx, request: Request) { }, 200, rate.headers, - ) + ); } export async function resolveSkillVersionV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'read') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "read"); + if (!rate.ok) return rate.response; - const url = new URL(request.url) - const slug = url.searchParams.get('slug')?.trim().toLowerCase() - const hash = url.searchParams.get('hash')?.trim().toLowerCase() - if (!slug || !hash) return text('Missing slug or hash', 400, rate.headers) - if (!/^[a-f0-9]{64}$/.test(hash)) return text('Invalid hash', 400, rate.headers) + const url = new URL(request.url); + const slug = url.searchParams.get("slug")?.trim().toLowerCase(); + const hash = url.searchParams.get("hash")?.trim().toLowerCase(); + if (!slug || !hash) return text("Missing slug or hash", 400, rate.headers); + if (!/^[a-f0-9]{64}$/.test(hash)) return text("Invalid hash", 400, rate.headers); - const resolved = await ctx.runQuery(api.skills.resolveVersionByHash, { slug, hash }) - if (!resolved) return text('Skill not found', 404, rate.headers) + const resolved = await ctx.runQuery(api.skills.resolveVersionByHash, { slug, hash }); + if (!resolved) return text("Skill not found", 404, rate.headers); - return json({ slug, match: resolved.match, latestVersion: resolved.latestVersion }, 200, rate.headers) + return json( + { slug, match: 
resolved.match, latestVersion: resolved.latestVersion }, + 200, + rate.headers, + ); } type SkillListSort = - | 'updated' - | 'downloads' - | 'stars' - | 'installsCurrent' - | 'installsAllTime' - | 'trending' + | "updated" + | "downloads" + | "stars" + | "installsCurrent" + | "installsAllTime" + | "trending"; function parseListSort(value: string | null): SkillListSort { - const normalized = value?.trim().toLowerCase() - if (normalized === 'downloads') return 'downloads' - if (normalized === 'stars' || normalized === 'rating') return 'stars' + const normalized = value?.trim().toLowerCase(); + if (normalized === "downloads") return "downloads"; + if (normalized === "stars" || normalized === "rating") return "stars"; if ( - normalized === 'installs' || - normalized === 'install' || - normalized === 'installscurrent' || - normalized === 'installs-current' + normalized === "installs" || + normalized === "install" || + normalized === "installscurrent" || + normalized === "installs-current" ) { - return 'installsCurrent' + return "installsCurrent"; } - if (normalized === 'installsalltime' || normalized === 'installs-all-time') { - return 'installsAllTime' + if (normalized === "installsalltime" || normalized === "installs-all-time") { + return "installsAllTime"; } - if (normalized === 'trending') return 'trending' - return 'updated' + if (normalized === "trending") return "trending"; + return "updated"; } export async function listSkillsV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'read') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "read"); + if (!rate.ok) return rate.response; - const url = new URL(request.url) - const limit = toOptionalNumber(url.searchParams.get('limit')) - const rawCursor = url.searchParams.get('cursor')?.trim() || undefined - const sort = parseListSort(url.searchParams.get('sort')) - const cursor = sort === 'trending' ? 
undefined : rawCursor + const url = new URL(request.url); + const limit = toOptionalNumber(url.searchParams.get("limit")); + const rawCursor = url.searchParams.get("cursor")?.trim() || undefined; + const sort = parseListSort(url.searchParams.get("sort")); + const cursor = sort === "trending" ? undefined : rawCursor; const result = (await ctx.runQuery(api.skills.listPublicPage, { limit, cursor, sort, - })) as ListSkillsResult + })) as ListSkillsResult; // Batch resolve all tags in a single query instead of N queries const resolvedTagsList = await resolveTagsBatch( ctx, result.items.map((item) => item.skill.tags), - ) + ); const items = result.items.map((item, idx) => ({ slug: item.skill.slug, @@ -202,9 +212,9 @@ export async function listSkillsV1Handler(ctx: ActionCtx, request: Request) { changelog: item.latestVersion.changelog, } : null, - })) + })); - return json({ items, nextCursor: result.nextCursor ?? null }, 200, rate.headers) + return json({ items, nextCursor: result.nextCursor ?? null }, 200, rate.headers); } async function describeOwnerVisibleSkillState( @@ -212,68 +222,71 @@ async function describeOwnerVisibleSkillState( request: Request, slug: string, ): Promise<{ status: number; message: string } | null> { - const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }) - if (!skill) return null + const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }); + if (!skill) return null; - const apiTokenUserId = await getOptionalApiTokenUserId(ctx, request) - const isOwner = Boolean(apiTokenUserId && apiTokenUserId === skill.ownerUserId) - if (!isOwner) return null + const apiTokenUserId = await getOptionalApiTokenUserId(ctx, request); + const isOwner = Boolean(apiTokenUserId && apiTokenUserId === skill.ownerUserId); + if (!isOwner) return null; if (skill.softDeletedAt) { return { status: 410, message: `Skill is hidden/deleted. 
Run "clawhub undelete ${slug}" to restore it.`, - } + }; } - if (skill.moderationStatus === 'hidden') { - if (skill.moderationReason === 'pending.scan' || skill.moderationReason === 'scanner.vt.pending') { + if (skill.moderationStatus === "hidden") { + if ( + skill.moderationReason === "pending.scan" || + skill.moderationReason === "scanner.vt.pending" + ) { return { status: 423, - message: 'Skill is hidden while security scan is pending. Try again in a few minutes.', - } + message: "Skill is hidden while security scan is pending. Try again in a few minutes.", + }; } - if (skill.moderationReason === 'quality.low') { + if (skill.moderationReason === "quality.low") { return { status: 403, message: 'Skill is hidden by quality checks. Update SKILL.md content or run "clawhub undelete " after review.', - } + }; } return { status: 403, message: `Skill is hidden by moderation${ - skill.moderationReason ? ` (${skill.moderationReason})` : '' + skill.moderationReason ? ` (${skill.moderationReason})` : "" }.`, - } + }; } - if (skill.moderationStatus === 'removed') { - return { status: 410, message: 'Skill has been removed by moderation.' } + if (skill.moderationStatus === "removed") { + return { status: 410, message: "Skill has been removed by moderation." }; } - return null + return null; } export async function skillsGetRouterV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'read') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "read"); + if (!rate.ok) return rate.response; - const segments = getPathSegments(request, '/api/v1/skills/') - if (segments.length === 0) return text('Missing slug', 400, rate.headers) - const slug = segments[0]?.trim().toLowerCase() ?? 
'' - const second = segments[1] - const third = segments[2] + const segments = getPathSegments(request, "/api/v1/skills/"); + if (segments.length === 0) return text("Missing slug", 400, rate.headers); + const slug = segments[0]?.trim().toLowerCase() ?? ""; + const second = segments[1]; + const third = segments[2]; if (segments.length === 1) { - const result = (await ctx.runQuery(api.skills.getBySlug, { slug })) as GetBySlugResult + const result = (await ctx.runQuery(api.skills.getBySlug, { slug })) as GetBySlugResult; if (!result?.skill) { - const hidden = await describeOwnerVisibleSkillState(ctx, request, slug) - if (hidden) return text(hidden.message, hidden.status, rate.headers) - return text('Skill not found', 404, rate.headers) + const hidden = await describeOwnerVisibleSkillState(ctx, request, slug); + if (hidden) return text(hidden.message, hidden.status, rate.headers); + return text("Skill not found", 404, rate.headers); } - const [tags] = await resolveTagsBatch(ctx, [result.skill.tags]) + const [tags] = await resolveTagsBatch(ctx, [result.skill.tags]); return json( { skill: { @@ -304,26 +317,101 @@ export async function skillsGetRouterV1Handler(ctx: ActionCtx, request: Request) ? { isSuspicious: result.moderationInfo.isSuspicious ?? false, isMalwareBlocked: result.moderationInfo.isMalwareBlocked ?? false, + verdict: result.moderationInfo.verdict ?? "clean", + reasonCodes: result.moderationInfo.reasonCodes ?? [], + summary: result.moderationInfo.summary ?? null, + engineVersion: result.moderationInfo.engineVersion ?? null, + updatedAt: result.moderationInfo.updatedAt ?? 
null, + } + : null, + }, + 200, + rate.headers, + ); + } + + if (second === "moderation" && segments.length === 2) { + const result = (await ctx.runQuery(api.skills.getBySlug, { slug })) as GetBySlugResult; + if (!result?.skill) { + const hidden = await describeOwnerVisibleSkillState(ctx, request, slug); + if (hidden) return text(hidden.message, hidden.status, rate.headers); + return text("Skill not found", 404, rate.headers); + } + + const apiTokenUserId = await getOptionalApiTokenUserId(ctx, request); + const isOwner = Boolean(apiTokenUserId && apiTokenUserId === result.skill.ownerUserId); + let isStaff = false; + if (apiTokenUserId) { + const caller = await ctx.runQuery(internal.users.getByIdInternal, { userId: apiTokenUserId }); + if (caller?.role === "admin" || caller?.role === "moderator") { + isStaff = true; + } + } + + const mod = result.moderationInfo; + const isFlagged = Boolean(mod?.isSuspicious || mod?.isMalwareBlocked); + if (!isOwner && !isStaff && !isFlagged) { + return text("Moderation details unavailable", 404, rate.headers); + } + + const allEvidence = + ( + result.skill as { + moderationEvidence?: Array<{ + code: string; + severity: "info" | "warn" | "critical"; + file: string; + line: number; + message: string; + evidence: string; + }>; + } + ).moderationEvidence ?? []; + const evidence = + isOwner || isStaff + ? allEvidence + : allEvidence.map((entry) => ({ + code: entry.code, + severity: entry.severity, + file: entry.file, + line: entry.line, + message: entry.message, + evidence: "", + })); + + return json( + { + moderation: mod + ? { + isSuspicious: mod.isSuspicious ?? false, + isMalwareBlocked: mod.isMalwareBlocked ?? false, + verdict: mod.verdict ?? "clean", + reasonCodes: mod.reasonCodes ?? [], + summary: mod.summary ?? null, + engineVersion: mod.engineVersion ?? null, + updatedAt: mod.updatedAt ?? null, + evidence, + legacyReason: isOwner || isStaff ? (mod.reason ?? 
null) : null, } : null, }, 200, rate.headers, - ) + ); } - if (second === 'versions' && segments.length === 2) { - const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }) - if (!skill || skill.softDeletedAt) return text('Skill not found', 404, rate.headers) + if (second === "versions" && segments.length === 2) { + const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }); + if (!skill || skill.softDeletedAt) return text("Skill not found", 404, rate.headers); - const url = new URL(request.url) - const limit = toOptionalNumber(url.searchParams.get('limit')) - const cursor = url.searchParams.get('cursor')?.trim() || undefined + const url = new URL(request.url); + const limit = toOptionalNumber(url.searchParams.get("limit")); + const cursor = url.searchParams.get("cursor")?.trim() || undefined; const result = (await ctx.runQuery(api.skills.listVersionsPage, { skillId: skill._id, limit, cursor, - })) as ListVersionsResult + })) as ListVersionsResult; const items = result.items .filter((version) => !version.softDeletedAt) @@ -332,21 +420,21 @@ export async function skillsGetRouterV1Handler(ctx: ActionCtx, request: Request) createdAt: version.createdAt, changelog: version.changelog, changelogSource: version.changelogSource ?? null, - })) + })); - return json({ items, nextCursor: result.nextCursor ?? null }, 200, rate.headers) + return json({ items, nextCursor: result.nextCursor ?? 
null }, 200, rate.headers); } - if (second === 'versions' && third && segments.length === 3) { - const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }) - if (!skill || skill.softDeletedAt) return text('Skill not found', 404, rate.headers) + if (second === "versions" && third && segments.length === 3) { + const skill = await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug }); + if (!skill || skill.softDeletedAt) return text("Skill not found", 404, rate.headers); const version = await ctx.runQuery(api.skills.getVersionBySkillAndVersion, { skillId: skill._id, version: third, - }) - if (!version) return text('Version not found', 404, rate.headers) - if (version.softDeletedAt) return text('Version not available', 410, rate.headers) + }); + if (!version) return text("Version not found", 404, rate.headers); + if (version.softDeletedAt) return text("Version not available", 410, rate.headers); return json( { @@ -366,46 +454,46 @@ export async function skillsGetRouterV1Handler(ctx: ActionCtx, request: Request) }, 200, rate.headers, - ) + ); } - if (second === 'file' && segments.length === 2) { - const url = new URL(request.url) - const path = url.searchParams.get('path')?.trim() - if (!path) return text('Missing path', 400, rate.headers) - const versionParam = url.searchParams.get('version')?.trim() - const tagParam = url.searchParams.get('tag')?.trim() + if (second === "file" && segments.length === 2) { + const url = new URL(request.url); + const path = url.searchParams.get("path")?.trim(); + if (!path) return text("Missing path", 400, rate.headers); + const versionParam = url.searchParams.get("version")?.trim(); + const tagParam = url.searchParams.get("tag")?.trim(); - const skillResult = (await ctx.runQuery(api.skills.getBySlug, { slug })) as GetBySlugResult - if (!skillResult?.skill) return text('Skill not found', 404, rate.headers) + const skillResult = (await ctx.runQuery(api.skills.getBySlug, { slug })) as GetBySlugResult; + if 
(!skillResult?.skill) return text("Skill not found", 404, rate.headers); - let version = skillResult.latestVersion + let version = skillResult.latestVersion; if (versionParam) { version = await ctx.runQuery(api.skills.getVersionBySkillAndVersion, { skillId: skillResult.skill._id, version: versionParam, - }) + }); } else if (tagParam) { - const versionId = skillResult.skill.tags[tagParam] + const versionId = skillResult.skill.tags[tagParam]; if (versionId) { - version = await ctx.runQuery(api.skills.getVersionById, { versionId }) + version = await ctx.runQuery(api.skills.getVersionById, { versionId }); } } - if (!version) return text('Version not found', 404, rate.headers) - if (version.softDeletedAt) return text('Version not available', 410, rate.headers) + if (!version) return text("Version not found", 404, rate.headers); + if (version.softDeletedAt) return text("Version not available", 410, rate.headers); - const normalized = path.trim() - const normalizedLower = normalized.toLowerCase() + const normalized = path.trim(); + const normalizedLower = normalized.toLowerCase(); const file = version.files.find((entry) => entry.path === normalized) ?? 
- version.files.find((entry) => entry.path.toLowerCase() === normalizedLower) - if (!file) return text('File not found', 404, rate.headers) - if (file.size > MAX_RAW_FILE_BYTES) return text('File exceeds 200KB limit', 413, rate.headers) + version.files.find((entry) => entry.path.toLowerCase() === normalizedLower); + if (!file) return text("File not found", 404, rate.headers); + if (file.size > MAX_RAW_FILE_BYTES) return text("File exceeds 200KB limit", 413, rate.headers); - const blob = await ctx.storage.get(file.storageId) - if (!blob) return text('File missing in storage', 410, rate.headers) - const textContent = await blob.text() + const blob = await ctx.storage.get(file.storageId); + if (!blob) return text("File missing in storage", 410, rate.headers); + const textContent = await blob.text(); return safeTextFileResponse({ textContent, path: file.path, @@ -413,83 +501,83 @@ export async function skillsGetRouterV1Handler(ctx: ActionCtx, request: Request) sha256: file.sha256, size: file.size, headers: rate.headers, - }) + }); } - return text('Not found', 404, rate.headers) + return text("Not found", 404, rate.headers); } export async function publishSkillV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'write') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "write"); + if (!rate.ok) return rate.response; try { - if (!parseBearerToken(request)) return text('Unauthorized', 401, rate.headers) + if (!parseBearerToken(request)) return text("Unauthorized", 401, rate.headers); } catch { - return text('Unauthorized', 401, rate.headers) + return text("Unauthorized", 401, rate.headers); } - const { userId } = await requireApiTokenUser(ctx, request) + const { userId } = await requireApiTokenUser(ctx, request); - const contentType = request.headers.get('content-type') ?? '' + const contentType = request.headers.get("content-type") ?? 
""; try { - if (contentType.includes('application/json')) { - const body = await request.json() - const payload = parsePublishBody(body) - const result = await publishVersionForUser(ctx, userId, payload) - return json({ ok: true, ...result }, 200, rate.headers) + if (contentType.includes("application/json")) { + const body = await request.json(); + const payload = parsePublishBody(body); + const result = await publishVersionForUser(ctx, userId, payload); + return json({ ok: true, ...result }, 200, rate.headers); } - if (contentType.includes('multipart/form-data')) { - const payload = await parseMultipartPublish(ctx, request) - const result = await publishVersionForUser(ctx, userId, payload) - return json({ ok: true, ...result }, 200, rate.headers) + if (contentType.includes("multipart/form-data")) { + const payload = await parseMultipartPublish(ctx, request); + const result = await publishVersionForUser(ctx, userId, payload); + return json({ ok: true, ...result }, 200, rate.headers); } } catch (error) { - const message = error instanceof Error ? error.message : 'Publish failed' - return text(message, 400, rate.headers) + const message = error instanceof Error ? 
error.message : "Publish failed"; + return text(message, 400, rate.headers); } - return text('Unsupported content type', 415, rate.headers) + return text("Unsupported content type", 415, rate.headers); } export async function skillsPostRouterV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'write') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "write"); + if (!rate.ok) return rate.response; - const segments = getPathSegments(request, '/api/v1/skills/') - if (segments.length !== 2 || segments[1] !== 'undelete') { - return text('Not found', 404, rate.headers) + const segments = getPathSegments(request, "/api/v1/skills/"); + if (segments.length !== 2 || segments[1] !== "undelete") { + return text("Not found", 404, rate.headers); } - const slug = segments[0]?.trim().toLowerCase() ?? '' + const slug = segments[0]?.trim().toLowerCase() ?? ""; try { - const { userId } = await requireApiTokenUser(ctx, request) + const { userId } = await requireApiTokenUser(ctx, request); await ctx.runMutation(internal.skills.setSkillSoftDeletedInternal, { userId, slug, deleted: false, - }) - return json({ ok: true }, 200, rate.headers) + }); + return json({ ok: true }, 200, rate.headers); } catch (error) { - return softDeleteErrorToResponse('skill', error, rate.headers) + return softDeleteErrorToResponse("skill", error, rate.headers); } } export async function skillsDeleteRouterV1Handler(ctx: ActionCtx, request: Request) { - const rate = await applyRateLimit(ctx, request, 'write') - if (!rate.ok) return rate.response + const rate = await applyRateLimit(ctx, request, "write"); + if (!rate.ok) return rate.response; - const segments = getPathSegments(request, '/api/v1/skills/') - if (segments.length !== 1) return text('Not found', 404, rate.headers) - const slug = segments[0]?.trim().toLowerCase() ?? 
'' + const segments = getPathSegments(request, "/api/v1/skills/"); + if (segments.length !== 1) return text("Not found", 404, rate.headers); + const slug = segments[0]?.trim().toLowerCase() ?? ""; try { - const { userId } = await requireApiTokenUser(ctx, request) + const { userId } = await requireApiTokenUser(ctx, request); await ctx.runMutation(internal.skills.setSkillSoftDeletedInternal, { userId, slug, deleted: true, - }) - return json({ ok: true }, 200, rate.headers) + }); + return json({ ok: true }, 200, rate.headers); } catch (error) { - return softDeleteErrorToResponse('skill', error, rate.headers) + return softDeleteErrorToResponse("skill", error, rate.headers); } } diff --git a/convex/lib/moderation.ts b/convex/lib/moderation.ts index 095afdc6d..3a2da9d4f 100644 --- a/convex/lib/moderation.ts +++ b/convex/lib/moderation.ts @@ -1,49 +1,49 @@ -import type { Doc } from '../_generated/dataModel' +import type { Doc } from "../_generated/dataModel"; -const FLAG_RULES: Array<{ flag: string; pattern: RegExp }> = [ - // Known-bad / known-suspicious identifiers. - // NOTE: keep these narrowly scoped; use staff review to confirm removals. 
-  {
-    flag: 'blocked.malware',
-    pattern: /(keepcold131\/ClawdAuthenticatorTool|ClawdAuthenticatorTool)/i,
-  },
-
-  { flag: 'suspicious.keyword', pattern: /(malware|stealer|phish|phishing|keylogger)/i },
-  { flag: 'suspicious.secrets', pattern: /(api[-_ ]?key|token|password|private key|secret)/i },
-  { flag: 'suspicious.crypto', pattern: /(wallet|seed phrase|mnemonic|crypto)/i },
-  { flag: 'suspicious.webhook', pattern: /(discord\.gg|webhook|hooks\.slack)/i },
-  { flag: 'suspicious.script', pattern: /(curl[^\n]+\|\s*(sh|bash))/i },
-  { flag: 'suspicious.url_shortener', pattern: /(bit\.ly|tinyurl\.com|t\.co|goo\.gl|is\.gd)/i },
-]
+const KNOWN_BLOCKED_SIGNATURE = /(keepcold131\/ClawdAuthenticatorTool|ClawdAuthenticatorTool)/i;
+const SUSPICIOUS_INSTALL_URL = /https?:\/\/(bit\.ly|tinyurl\.com|t\.co|goo\.gl|is\.gd)\//i;
+const SUSPICIOUS_RAW_IP_URL = /https?:\/\/\d{1,3}(?:\.\d{1,3}){3}/i;
+const SUSPICIOUS_SCRIPT_PIPE = /curl[^\n]+\|\s*(sh|bash)/i;
 export function deriveModerationFlags({
   skill,
   parsed,
   files,
 }: {
-  skill: Pick<Doc<'skills'>, 'slug' | 'displayName' | 'summary'>
-  parsed: Doc<'skillVersions'>['parsed']
-  files: Doc<'skillVersions'>['files']
+  skill: Pick<Doc<"skills">, "slug" | "displayName" | "summary">;
+  parsed: Doc<"skillVersions">["parsed"];
+  files: Doc<"skillVersions">["files"];
 }) {
   const text = [
     skill.slug,
     skill.displayName,
-    skill.summary ?? '',
+    skill.summary ?? "",
     JSON.stringify(parsed?.frontmatter ?? {}),
     JSON.stringify(parsed?.metadata ?? {}),
     JSON.stringify((parsed as { moltbot?: unknown } | undefined)?.moltbot ?? {}),
     ...files.map((file) => file.path),
   ]
     .filter(Boolean)
-    .join('\n')
+    .join("\n");
+
+  const flags = new Set<string>();
+  if (KNOWN_BLOCKED_SIGNATURE.test(text)) {
+    flags.add("blocked.malware");
+  }
-  const flags = new Set<string>()
+  // Context-aware suspicious checks only. Avoid broad keyword-only flags to reduce false positives.
+  if (
+    SUSPICIOUS_INSTALL_URL.test(text) ||
+    SUSPICIOUS_RAW_IP_URL.test(text) ||
+    SUSPICIOUS_SCRIPT_PIPE.test(text)
+  ) {
+    flags.add("flagged.suspicious");
+  }
-  for (const rule of FLAG_RULES) {
-    if (rule.pattern.test(text)) {
-      flags.add(rule.flag)
-    }
+  const always = (parsed?.frontmatter as Record<string, unknown> | undefined)?.always;
+  if (always === true || always === "true") {
+    flags.add("flagged.suspicious");
   }
-  return Array.from(flags)
+  return Array.from(flags);
 }
diff --git a/convex/lib/moderationEngine.test.ts b/convex/lib/moderationEngine.test.ts
new file mode 100644
index 000000000..121f499c3
--- /dev/null
+++ b/convex/lib/moderationEngine.test.ts
@@ -0,0 +1,54 @@
+import { describe, expect, it } from "vitest";
+import { buildModerationSnapshot, runStaticModerationScan } from "./moderationEngine";
+
+describe("moderationEngine", () => {
+  it("does not flag benign token/password docs text alone", () => {
+    const result = runStaticModerationScan({
+      slug: "demo",
+      displayName: "Demo",
+      summary: "A normal integration skill",
+      frontmatter: {},
+      metadata: {},
+      files: [{ path: "SKILL.md", size: 64 }],
+      fileContents: [
+        {
+          path: "SKILL.md",
+          content:
+            "This skill requires API token and password from the official provider settings.",
+        },
+      ],
+    });
+    expect(result.reasonCodes).toEqual([]);
+    expect(result.status).toBe("clean");
+  });
+
+  it("flags dynamic eval usage as suspicious", () => {
+    const result = runStaticModerationScan({
+      slug: "demo",
+      displayName: "Demo",
+      summary: "A normal integration skill",
+      frontmatter: {},
+      metadata: {},
+      files: [{ path: "index.ts", size: 64 }],
+      fileContents: [{ path: "index.ts", content: "const value = eval(code)" }],
+    });
+    expect(result.reasonCodes).toContain("suspicious.dynamic_code_execution");
+    expect(result.status).toBe("suspicious");
+  });
+
+  it("upgrades merged verdict to malicious when VT is malicious", () => {
+    const snapshot = buildModerationSnapshot({
+      staticScan: {
+        status: "suspicious",
+        
reasonCodes: ["suspicious.dynamic_code_execution"],
+        findings: [],
+        summary: "",
+        engineVersion: "v2.0.0",
+        checkedAt: Date.now(),
+      },
+      vtStatus: "malicious",
+    });
+    expect(snapshot.verdict).toBe("malicious");
+    expect(snapshot.reasonCodes).toContain("malicious.vt_malicious");
+  });
+});
diff --git a/convex/lib/moderationEngine.ts b/convex/lib/moderationEngine.ts
new file mode 100644
index 000000000..95845add2
--- /dev/null
+++ b/convex/lib/moderationEngine.ts
@@ -0,0 +1,360 @@
+import type { Doc, Id } from "../_generated/dataModel";
+import {
+  legacyFlagsFromVerdict,
+  MODERATION_ENGINE_VERSION,
+  normalizeReasonCodes,
+  type ModerationFinding,
+  REASON_CODES,
+  summarizeReasonCodes,
+  type ModerationVerdict,
+  verdictFromCodes,
+} from "./moderationReasonCodes";
+
+type TextFile = { path: string; content: string };
+
+export type StaticScanInput = {
+  slug: string;
+  displayName: string;
+  summary?: string;
+  frontmatter: Record<string, unknown>;
+  metadata?: unknown;
+  files: Array<{ path: string; size: number }>;
+  fileContents: TextFile[];
+};
+
+export type StaticScanResult = {
+  status: ModerationVerdict;
+  reasonCodes: string[];
+  findings: ModerationFinding[];
+  summary: string;
+  engineVersion: string;
+  checkedAt: number;
+};
+
+export type ModerationSnapshot = {
+  verdict: ModerationVerdict;
+  reasonCodes: string[];
+  evidence: ModerationFinding[];
+  summary: string;
+  engineVersion: string;
+  evaluatedAt: number;
+  sourceVersionId?: Id<"skillVersions">;
+  legacyFlags?: string[];
+};
+
+const MANIFEST_EXTENSION = /\.(json|yaml|yml|toml)$/i;
+const MARKDOWN_EXTENSION = /\.(md|markdown|mdx)$/i;
+const CODE_EXTENSION = /\.(js|ts|mjs|cjs|mts|cts|jsx|tsx|py|sh|bash|zsh|rb|go)$/i;
+
+const STANDARD_PORTS = new Set([80, 443, 8080, 8443, 3000]);
+
+function truncateEvidence(evidence: string, maxLen = 160) {
+  if (evidence.length <= maxLen) return evidence;
+  return `${evidence.slice(0, maxLen)}…`;
+}
+
+function addFinding(
+  findings: ModerationFinding[],
+  finding: 
Omit<ModerationFinding, "evidence"> & { evidence: string },
+) {
+  findings.push({ ...finding, evidence: truncateEvidence(finding.evidence.trim()) });
+}
+
+function findFirstLine(content: string, pattern: RegExp) {
+  const lines = content.split("\n");
+  for (let i = 0; i < lines.length; i += 1) {
+    if (pattern.test(lines[i])) {
+      return { line: i + 1, text: lines[i] };
+    }
+  }
+  return { line: 1, text: lines[0] ?? "" };
+}
+
+function scanCodeFile(path: string, content: string, findings: ModerationFinding[]) {
+  if (!CODE_EXTENSION.test(path)) return;
+  const hasChildProcess = /child_process/.test(content);
+  if (hasChildProcess) {
+    const match = findFirstLine(
+      content,
+      /\b(exec|execSync|spawn|spawnSync|execFile|execFileSync)\s*\(/,
+    );
+    if (match.text) {
+      addFinding(findings, {
+        code: REASON_CODES.DANGEROUS_EXEC,
+        severity: "critical",
+        file: path,
+        line: match.line,
+        message: "Shell command execution detected (child_process).",
+        evidence: match.text,
+      });
+    }
+  }
+
+  const dynamicCodeMatch = findFirstLine(content, /\beval\s*\(|new\s+Function\s*\(/);
+  if (dynamicCodeMatch.text && /\beval\s*\(|new\s+Function\s*\(/.test(content)) {
+    addFinding(findings, {
+      code: REASON_CODES.DYNAMIC_CODE,
+      severity: "critical",
+      file: path,
+      line: dynamicCodeMatch.line,
+      message: "Dynamic code execution detected.",
+      evidence: dynamicCodeMatch.text,
+    });
+  }
+
+  if (/stratum\+tcp|stratum\+ssl|coinhive|cryptonight|xmrig/i.test(content)) {
+    const match = findFirstLine(content, /stratum\+tcp|stratum\+ssl|coinhive|cryptonight|xmrig/i);
+    addFinding(findings, {
+      code: REASON_CODES.CRYPTO_MINING,
+      severity: "critical",
+      file: path,
+      line: match.line,
+      message: "Possible crypto mining behavior detected.",
+      evidence: match.text,
+    });
+  }
+
+  const wsMatch = content.match(/new\s+WebSocket\s*\(\s*["']wss?:\/\/[^"']*:(\d+)/);
+  if (wsMatch) {
+    const port = Number.parseInt(wsMatch[1] ?? 
"", 10); + if (Number.isFinite(port) && !STANDARD_PORTS.has(port)) { + const match = findFirstLine(content, /new\s+WebSocket\s*\(/); + addFinding(findings, { + code: REASON_CODES.SUSPICIOUS_NETWORK, + severity: "warn", + file: path, + line: match.line, + message: "WebSocket connection to non-standard port detected.", + evidence: match.text, + }); + } + } + + const hasFileRead = /readFileSync|readFile/.test(content); + const hasNetworkSend = /\bfetch\b|http\.request|\baxios\b/.test(content); + if (hasFileRead && hasNetworkSend) { + const match = findFirstLine(content, /readFileSync|readFile/); + addFinding(findings, { + code: REASON_CODES.EXFILTRATION, + severity: "warn", + file: path, + line: match.line, + message: "File read combined with network send (possible exfiltration).", + evidence: match.text, + }); + } + + const hasProcessEnv = /process\.env/.test(content); + if (hasProcessEnv && hasNetworkSend) { + const match = findFirstLine(content, /process\.env/); + addFinding(findings, { + code: REASON_CODES.CREDENTIAL_HARVEST, + severity: "critical", + file: path, + line: match.line, + message: "Environment variable access combined with network send.", + evidence: match.text, + }); + } + + if ( + /(\\x[0-9a-fA-F]{2}){6,}/.test(content) || + /(?:atob|Buffer\.from)\s*\(\s*["'][A-Za-z0-9+/=]{200,}["']/.test(content) + ) { + const match = findFirstLine(content, /(\\x[0-9a-fA-F]{2}){6,}|(?:atob|Buffer\.from)\s*\(/); + addFinding(findings, { + code: REASON_CODES.OBFUSCATED_CODE, + severity: "warn", + file: path, + line: match.line, + message: "Potential obfuscated payload detected.", + evidence: match.text, + }); + } +} + +function scanMarkdownFile(path: string, content: string, findings: ModerationFinding[]) { + if (!MARKDOWN_EXTENSION.test(path)) return; + if ( + /ignore\s+(all\s+)?previous\s+instructions/i.test(content) || + /system\s*prompt\s*[:=]/i.test(content) || + /you\s+are\s+now\s+(a|an)\b/i.test(content) + ) { + const match = findFirstLine( + content, + 
/ignore\s+(all\s+)?previous\s+instructions|system\s*prompt\s*[:=]|you\s+are\s+now\s+(a|an)\b/i, + ); + addFinding(findings, { + code: REASON_CODES.INJECTION_INSTRUCTIONS, + severity: "warn", + file: path, + line: match.line, + message: "Prompt-injection style instruction pattern detected.", + evidence: match.text, + }); + } +} + +function scanManifestFile(path: string, content: string, findings: ModerationFinding[]) { + if (!MANIFEST_EXTENSION.test(path)) return; + if ( + /https?:\/\/(bit\.ly|tinyurl\.com|t\.co|goo\.gl|is\.gd)\//i.test(content) || + /https?:\/\/\d{1,3}(?:\.\d{1,3}){3}/i.test(content) + ) { + const match = findFirstLine( + content, + /https?:\/\/(bit\.ly|tinyurl\.com|t\.co|goo\.gl|is\.gd)\/|https?:\/\/\d{1,3}(?:\.\d{1,3}){3}/i, + ); + addFinding(findings, { + code: REASON_CODES.SUSPICIOUS_INSTALL_SOURCE, + severity: "warn", + file: path, + line: match.line, + message: "Install source points to URL shortener or raw IP.", + evidence: match.text, + }); + } +} + +export function runStaticModerationScan(input: StaticScanInput): StaticScanResult { + const findings: ModerationFinding[] = []; + const files = [...input.fileContents].sort((a, b) => a.path.localeCompare(b.path)); + + for (const file of files) { + scanCodeFile(file.path, file.content, findings); + scanMarkdownFile(file.path, file.content, findings); + scanManifestFile(file.path, file.content, findings); + } + + const installJson = JSON.stringify(input.metadata ?? 
{});
+  if (/https?:\/\/(bit\.ly|tinyurl\.com|t\.co|goo\.gl|is\.gd)\//i.test(installJson)) {
+    addFinding(findings, {
+      code: REASON_CODES.SUSPICIOUS_INSTALL_SOURCE,
+      severity: "warn",
+      file: "metadata",
+      line: 1,
+      message: "Install metadata references shortener URL.",
+      evidence: installJson,
+    });
+  }
+
+  const alwaysValue = input.frontmatter.always;
+  if (alwaysValue === true || alwaysValue === "true") {
+    addFinding(findings, {
+      code: REASON_CODES.MANIFEST_PRIVILEGED_ALWAYS,
+      severity: "warn",
+      file: "SKILL.md",
+      line: 1,
+      message: "Skill is configured with always=true (persistent invocation).",
+      evidence: "always: true",
+    });
+  }
+
+  const identityText = `${input.slug}\n${input.displayName}\n${input.summary ?? ""}`;
+  if (/keepcold131\/ClawdAuthenticatorTool|ClawdAuthenticatorTool/i.test(identityText)) {
+    addFinding(findings, {
+      code: REASON_CODES.KNOWN_BLOCKED_SIGNATURE,
+      severity: "critical",
+      file: "metadata",
+      line: 1,
+      message: "Matched a known blocked malware signature.",
+      evidence: identityText,
+    });
+  }
+
+  findings.sort((a, b) =>
+    `${a.code}:${a.file}:${a.line}:${a.message}`.localeCompare(
+      `${b.code}:${b.file}:${b.line}:${b.message}`,
+    ),
+  );
+
+  const reasonCodes = normalizeReasonCodes(findings.map((f) => f.code));
+  const status = verdictFromCodes(reasonCodes);
+  return {
+    status,
+    reasonCodes,
+    findings,
+    summary: summarizeReasonCodes(reasonCodes),
+    engineVersion: MODERATION_ENGINE_VERSION,
+    checkedAt: Date.now(),
+  };
+}
+
+function dedupeEvidence(evidence: ModerationFinding[]) {
+  const seen = new Set<string>();
+  const out: ModerationFinding[] = [];
+  for (const item of evidence) {
+    const key = `${item.code}:${item.file}:${item.line}:${item.message}`;
+    if (seen.has(key)) continue;
+    seen.add(key);
+    out.push(item);
+  }
+  return out.slice(0, 40);
+}
+
+export function buildModerationSnapshot(params: {
+  staticScan?: StaticScanResult;
+  vtStatus?: string;
+  llmStatus?: string;
+  existingReasonCodes?: string[];
+  
existingEvidence?: ModerationFinding[]; + sourceVersionId?: Id<"skillVersions">; +}): ModerationSnapshot { + const reasonCodes = [...(params.existingReasonCodes ?? [])]; + const evidence = [...(params.existingEvidence ?? [])]; + + if (params.staticScan) { + reasonCodes.push(...params.staticScan.reasonCodes); + evidence.push(...params.staticScan.findings); + } + + const vt = params.vtStatus?.trim().toLowerCase(); + if (vt === "malicious") reasonCodes.push("malicious.vt_malicious"); + if (vt === "suspicious") reasonCodes.push("suspicious.vt_suspicious"); + + const llm = params.llmStatus?.trim().toLowerCase(); + if (llm === "malicious") reasonCodes.push("malicious.llm_malicious"); + if (llm === "suspicious") reasonCodes.push("suspicious.llm_suspicious"); + + const normalizedCodes = normalizeReasonCodes(reasonCodes); + const verdict = verdictFromCodes(normalizedCodes); + const normalizedEvidence = dedupeEvidence(evidence); + return { + verdict, + reasonCodes: normalizedCodes, + evidence: normalizedEvidence, + summary: summarizeReasonCodes(normalizedCodes), + engineVersion: MODERATION_ENGINE_VERSION, + evaluatedAt: Date.now(), + sourceVersionId: params.sourceVersionId, + legacyFlags: legacyFlagsFromVerdict(verdict), + }; +} + +export function resolveSkillVerdict( + skill: Pick< + Doc<"skills">, + "moderationVerdict" | "moderationFlags" | "moderationReason" | "moderationReasonCodes" + >, +): ModerationVerdict { + if (skill.moderationVerdict) return skill.moderationVerdict; + if (skill.moderationFlags?.includes("blocked.malware")) return "malicious"; + if (skill.moderationFlags?.includes("flagged.suspicious")) return "suspicious"; + if ( + skill.moderationReason?.startsWith("scanner.") && + skill.moderationReason.endsWith(".malicious") + ) { + return "malicious"; + } + if ( + skill.moderationReason?.startsWith("scanner.") && + skill.moderationReason.endsWith(".suspicious") + ) { + return "suspicious"; + } + if ((skill.moderationReasonCodes ?? 
[]).some((code) => code.startsWith("malicious."))) { + return "malicious"; + } + if ((skill.moderationReasonCodes ?? []).length > 0) return "suspicious"; + return "clean"; +} diff --git a/convex/lib/moderationReasonCodes.ts b/convex/lib/moderationReasonCodes.ts new file mode 100644 index 000000000..52190f31a --- /dev/null +++ b/convex/lib/moderationReasonCodes.ts @@ -0,0 +1,60 @@ +export type ModerationVerdict = "clean" | "suspicious" | "malicious"; + +export type ModerationFindingSeverity = "info" | "warn" | "critical"; + +export type ModerationFinding = { + code: string; + severity: ModerationFindingSeverity; + file: string; + line: number; + message: string; + evidence: string; +}; + +export const MODERATION_ENGINE_VERSION = "v2.0.0"; + +export const REASON_CODES = { + DANGEROUS_EXEC: "suspicious.dangerous_exec", + DYNAMIC_CODE: "suspicious.dynamic_code_execution", + CREDENTIAL_HARVEST: "malicious.env_harvesting", + EXFILTRATION: "suspicious.potential_exfiltration", + OBFUSCATED_CODE: "suspicious.obfuscated_code", + SUSPICIOUS_NETWORK: "suspicious.suspicious_network", + CRYPTO_MINING: "malicious.crypto_mining", + INJECTION_INSTRUCTIONS: "suspicious.prompt_injection_instructions", + SUSPICIOUS_INSTALL_SOURCE: "suspicious.install_untrusted_source", + MANIFEST_PRIVILEGED_ALWAYS: "suspicious.privileged_always", + KNOWN_BLOCKED_SIGNATURE: "malicious.known_blocked_signature", +} as const; + +const MALICIOUS_CODES = new Set([ + REASON_CODES.CREDENTIAL_HARVEST, + REASON_CODES.CRYPTO_MINING, + REASON_CODES.KNOWN_BLOCKED_SIGNATURE, +]); + +export function normalizeReasonCodes(codes: string[]) { + return Array.from(new Set(codes.filter(Boolean))).sort((a, b) => a.localeCompare(b)); +} + +export function summarizeReasonCodes(codes: string[]) { + if (codes.length === 0) return "No suspicious patterns detected."; + const top = codes.slice(0, 3).join(", "); + const extra = codes.length > 3 ? 
` (+${codes.length - 3} more)` : ""; + return `Detected: ${top}${extra}`; +} + +export function verdictFromCodes(codes: string[]): ModerationVerdict { + const normalized = normalizeReasonCodes(codes); + if (normalized.some((code) => MALICIOUS_CODES.has(code) || code.startsWith("malicious."))) { + return "malicious"; + } + if (normalized.length > 0) return "suspicious"; + return "clean"; +} + +export function legacyFlagsFromVerdict(verdict: ModerationVerdict) { + if (verdict === "malicious") return ["blocked.malware"]; + if (verdict === "suspicious") return ["flagged.suspicious"]; + return undefined; +} diff --git a/convex/lib/public.ts b/convex/lib/public.ts index 9cf9d8390..eb37fa53a 100644 --- a/convex/lib/public.ts +++ b/convex/lib/public.ts @@ -1,45 +1,46 @@ -import type { Doc } from '../_generated/dataModel' +import type { Doc } from "../_generated/dataModel"; +import { resolveSkillVerdict } from "./moderationEngine"; export type PublicUser = Pick< - Doc<'users'>, - '_id' | '_creationTime' | 'handle' | 'name' | 'displayName' | 'image' | 'bio' -> + Doc<"users">, + "_id" | "_creationTime" | "handle" | "name" | "displayName" | "image" | "bio" +>; export type PublicSkill = Pick< - Doc<'skills'>, - | '_id' - | '_creationTime' - | 'slug' - | 'displayName' - | 'summary' - | 'ownerUserId' - | 'canonicalSkillId' - | 'forkOf' - | 'latestVersionId' - | 'tags' - | 'badges' - | 'stats' - | 'createdAt' - | 'updatedAt' -> + Doc<"skills">, + | "_id" + | "_creationTime" + | "slug" + | "displayName" + | "summary" + | "ownerUserId" + | "canonicalSkillId" + | "forkOf" + | "latestVersionId" + | "tags" + | "badges" + | "stats" + | "createdAt" + | "updatedAt" +>; export type PublicSoul = Pick< - Doc<'souls'>, - | '_id' - | '_creationTime' - | 'slug' - | 'displayName' - | 'summary' - | 'ownerUserId' - | 'latestVersionId' - | 'tags' - | 'stats' - | 'createdAt' - | 'updatedAt' -> + Doc<"souls">, + | "_id" + | "_creationTime" + | "slug" + | "displayName" + | "summary" + | "ownerUserId" 
+ | "latestVersionId" + | "tags" + | "stats" + | "createdAt" + | "updatedAt" +>; -export function toPublicUser(user: Doc<'users'> | null | undefined): PublicUser | null { - if (!user || user.deletedAt || user.deactivatedAt) return null +export function toPublicUser(user: Doc<"users"> | null | undefined): PublicUser | null { + if (!user || user.deletedAt || user.deactivatedAt) return null; return { _id: user._id, _creationTime: user._creationTime, @@ -48,30 +49,35 @@ export function toPublicUser(user: Doc<'users'> | null | undefined): PublicUser displayName: user.displayName, image: user.image, bio: user.bio, - } + }; } -export function toPublicSkill(skill: Doc<'skills'> | null | undefined): PublicSkill | null { - if (!skill || skill.softDeletedAt) return null - if (skill.moderationStatus && skill.moderationStatus !== 'active') return null - if (skill.moderationFlags?.includes('blocked.malware')) return null +export function toPublicSkill(skill: Doc<"skills"> | null | undefined): PublicSkill | null { + if (!skill || skill.softDeletedAt) return null; + if (skill.moderationStatus && skill.moderationStatus !== "active") return null; + if ( + resolveSkillVerdict(skill) === "malicious" || + skill.moderationFlags?.includes("blocked.malware") + ) { + return null; + } const stats = { downloads: - typeof skill.statsDownloads === 'number' + typeof skill.statsDownloads === "number" ? skill.statsDownloads : (skill.stats?.downloads ?? 0), - stars: typeof skill.statsStars === 'number' ? skill.statsStars : (skill.stats?.stars ?? 0), + stars: typeof skill.statsStars === "number" ? skill.statsStars : (skill.stats?.stars ?? 0), installsCurrent: - typeof skill.statsInstallsCurrent === 'number' + typeof skill.statsInstallsCurrent === "number" ? skill.statsInstallsCurrent : (skill.stats?.installsCurrent ?? 0), installsAllTime: - typeof skill.statsInstallsAllTime === 'number' + typeof skill.statsInstallsAllTime === "number" ? skill.statsInstallsAllTime : (skill.stats?.installsAllTime ?? 
0), versions: skill.stats?.versions ?? 0, comments: skill.stats?.comments ?? 0, - } + }; return { _id: skill._id, _creationTime: skill._creationTime, @@ -87,11 +93,11 @@ export function toPublicSkill(skill: Doc<'skills'> | null | undefined): PublicSk stats, createdAt: skill.createdAt, updatedAt: skill.updatedAt, - } + }; } -export function toPublicSoul(soul: Doc<'souls'> | null | undefined): PublicSoul | null { - if (!soul || soul.softDeletedAt) return null +export function toPublicSoul(soul: Doc<"souls"> | null | undefined): PublicSoul | null { + if (!soul || soul.softDeletedAt) return null; return { _id: soul._id, _creationTime: soul._creationTime, @@ -104,5 +110,5 @@ export function toPublicSoul(soul: Doc<'souls'> | null | undefined): PublicSoul stats: soul.stats, createdAt: soul.createdAt, updatedAt: soul.updatedAt, - } + }; } diff --git a/convex/lib/skillPublish.ts b/convex/lib/skillPublish.ts index 3479d79f9..e44ee0f88 100644 --- a/convex/lib/skillPublish.ts +++ b/convex/lib/skillPublish.ts @@ -1,21 +1,22 @@ -import { ConvexError } from 'convex/values' -import semver from 'semver' -import { api, internal } from '../_generated/api' -import type { Doc, Id } from '../_generated/dataModel' -import type { ActionCtx, MutationCtx } from '../_generated/server' -import { getSkillBadgeMap, isSkillHighlighted } from './badges' -import { generateChangelogForPublish } from './changelog' -import { generateEmbedding } from './embeddings' -import { requireGitHubAccountAge } from './githubAccount' -import type { PublicUser } from './public' +import { ConvexError } from "convex/values"; +import semver from "semver"; +import type { Doc, Id } from "../_generated/dataModel"; +import type { ActionCtx, MutationCtx } from "../_generated/server"; +import type { PublicUser } from "./public"; +import type { WebhookSkillPayload } from "./webhooks"; +import { api, internal } from "../_generated/api"; +import { getSkillBadgeMap, isSkillHighlighted } from "./badges"; +import { 
generateChangelogForPublish } from "./changelog"; +import { generateEmbedding } from "./embeddings"; +import { requireGitHubAccountAge } from "./githubAccount"; +import { runStaticModerationScan } from "./moderationEngine"; import { computeQualitySignals, evaluateQuality, getTrustTier, type QualityAssessment, toStructuralFingerprint, -} from './skillQuality' -import { generateSkillSummary } from './skillSummary' +} from "./skillQuality"; import { buildEmbeddingText, getFrontmatterMetadata, @@ -25,169 +26,169 @@ import { parseClawdisMetadata, parseFrontmatter, sanitizePath, -} from './skills' -import type { WebhookSkillPayload } from './webhooks' +} from "./skills"; +import { generateSkillSummary } from "./skillSummary"; -const MAX_TOTAL_BYTES = 50 * 1024 * 1024 -const MAX_FILES_FOR_EMBEDDING = 40 -const QUALITY_WINDOW_MS = 24 * 60 * 60 * 1000 -const QUALITY_ACTIVITY_LIMIT = 60 +const MAX_TOTAL_BYTES = 50 * 1024 * 1024; +const MAX_FILES_FOR_EMBEDDING = 40; +const QUALITY_WINDOW_MS = 24 * 60 * 60 * 1000; +const QUALITY_ACTIVITY_LIMIT = 60; export type PublishResult = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - embeddingId: Id<'skillEmbeddings'> -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + embeddingId: Id<"skillEmbeddings">; +}; export type PublishVersionArgs = { - slug: string - displayName: string - version: string - changelog: string - tags?: string[] - forkOf?: { slug: string; version?: string } + slug: string; + displayName: string; + version: string; + changelog: string; + tags?: string[]; + forkOf?: { slug: string; version?: string }; source?: { - kind: 'github' - url: string - repo: string - ref: string - commit: string - path: string - importedAt: number - } + kind: "github"; + url: string; + repo: string; + ref: string; + commit: string; + path: string; + importedAt: number; + }; files: Array<{ - path: string - size: number - storageId: Id<'_storage'> - sha256: string - contentType?: string - }> -} + path: string; + size: 
number; + storageId: Id<"_storage">; + sha256: string; + contentType?: string; + }>; +}; export type PublishOptions = { - bypassGitHubAccountAge?: boolean - bypassNewSkillRateLimit?: boolean - bypassQualityGate?: boolean - skipBackup?: boolean - skipWebhook?: boolean -} + bypassGitHubAccountAge?: boolean; + bypassNewSkillRateLimit?: boolean; + bypassQualityGate?: boolean; + skipBackup?: boolean; + skipWebhook?: boolean; +}; export async function publishVersionForUser( ctx: ActionCtx, - userId: Id<'users'>, + userId: Id<"users">, args: PublishVersionArgs, options: PublishOptions = {}, ): Promise { - const version = args.version.trim() - const slug = args.slug.trim().toLowerCase() - const displayName = args.displayName.trim() - if (!slug || !displayName) throw new ConvexError('Slug and display name required') + const version = args.version.trim(); + const slug = args.slug.trim().toLowerCase(); + const displayName = args.displayName.trim(); + if (!slug || !displayName) throw new ConvexError("Slug and display name required"); if (!/^[a-z0-9][a-z0-9-]*$/.test(slug)) { - throw new ConvexError('Slug must be lowercase and url-safe') + throw new ConvexError("Slug must be lowercase and url-safe"); } if (!semver.valid(version)) { - throw new ConvexError('Version must be valid semver') + throw new ConvexError("Version must be valid semver"); } if (!options.bypassGitHubAccountAge) { - await requireGitHubAccountAge(ctx, userId) + await requireGitHubAccountAge(ctx, userId); } const existingSkill = (await ctx.runQuery(internal.skills.getSkillBySlugInternal, { slug, - })) as Doc<'skills'> | null - const isNewSkill = !existingSkill + })) as Doc<"skills"> | null; + const isNewSkill = !existingSkill; - const suppliedChangelog = args.changelog.trim() - const changelogSource = suppliedChangelog ? ('user' as const) : ('auto' as const) + const suppliedChangelog = args.changelog.trim(); + const changelogSource = suppliedChangelog ? 
("user" as const) : ("auto" as const); const sanitizedFiles = args.files.map((file) => ({ ...file, path: sanitizePath(file.path), - })) + })); if (sanitizedFiles.some((file) => !file.path)) { - throw new ConvexError('Invalid file paths') + throw new ConvexError("Invalid file paths"); } const safeFiles = sanitizedFiles.map((file) => ({ ...file, path: file.path as string, - })) + })); if (safeFiles.some((file) => !isTextFile(file.path, file.contentType ?? undefined))) { - throw new ConvexError('Only text-based files are allowed') + throw new ConvexError("Only text-based files are allowed"); } - const totalBytes = safeFiles.reduce((sum, file) => sum + file.size, 0) + const totalBytes = safeFiles.reduce((sum, file) => sum + file.size, 0); if (totalBytes > MAX_TOTAL_BYTES) { - throw new ConvexError('Skill bundle exceeds 50MB limit') + throw new ConvexError("Skill bundle exceeds 50MB limit"); } const readmeFile = safeFiles.find( - (file) => file.path?.toLowerCase() === 'skill.md' || file.path?.toLowerCase() === 'skills.md', - ) - if (!readmeFile) throw new ConvexError('SKILL.md is required') + (file) => file.path?.toLowerCase() === "skill.md" || file.path?.toLowerCase() === "skills.md", + ); + if (!readmeFile) throw new ConvexError("SKILL.md is required"); - const readmeText = await fetchText(ctx, readmeFile.storageId) - const frontmatter = parseFrontmatter(readmeText) - const clawdis = parseClawdisMetadata(frontmatter) + const readmeText = await fetchText(ctx, readmeFile.storageId); + const frontmatter = parseFrontmatter(readmeText); + const clawdis = parseClawdisMetadata(frontmatter); const owner = (await ctx.runQuery(internal.users.getByIdInternal, { userId, - })) as Doc<'users'> | null - const ownerCreatedAt = owner?.createdAt ?? owner?._creationTime ?? Date.now() - const now = Date.now() - const frontmatterMetadata = getFrontmatterMetadata(frontmatter) + })) as Doc<"users"> | null; + const ownerCreatedAt = owner?.createdAt ?? owner?._creationTime ?? 
Date.now(); + const now = Date.now(); + const frontmatterMetadata = getFrontmatterMetadata(frontmatter); // Check for description in metadata.description (nested) or description (direct frontmatter field) const metadataDescription = frontmatterMetadata && - typeof frontmatterMetadata === 'object' && + typeof frontmatterMetadata === "object" && !Array.isArray(frontmatterMetadata) && - typeof (frontmatterMetadata as Record).description === 'string' + typeof (frontmatterMetadata as Record).description === "string" ? ((frontmatterMetadata as Record).description as string) - : undefined - const directDescription = getFrontmatterValue(frontmatter, 'description') + : undefined; + const directDescription = getFrontmatterValue(frontmatter, "description"); // Prioritize the new description from frontmatter over the existing skill summary // This ensures updates to the description are reflected on subsequent publishes (#301) - const summaryFromFrontmatter = metadataDescription ?? directDescription + const summaryFromFrontmatter = metadataDescription ?? directDescription; const summary = await generateSkillSummary({ slug, displayName, readmeText, currentSummary: summaryFromFrontmatter ?? existingSkill?.summary ?? 
undefined, - }) + }); - let qualityAssessment: QualityAssessment | null = null + let qualityAssessment: QualityAssessment | null = null; if (isNewSkill && !options.bypassQualityGate) { const ownerActivity = (await ctx.runQuery(internal.skills.getOwnerSkillActivityInternal, { ownerUserId: userId, limit: QUALITY_ACTIVITY_LIMIT, })) as Array<{ - slug: string - summary?: string - createdAt: number - latestVersionId?: Id<'skillVersions'> - }> + slug: string; + summary?: string; + createdAt: number; + latestVersionId?: Id<"skillVersions">; + }>; - const trustTier = getTrustTier(now - ownerCreatedAt, ownerActivity.length) + const trustTier = getTrustTier(now - ownerCreatedAt, ownerActivity.length); const qualitySignals = computeQualitySignals({ readmeText, summary, - }) + }); const recentCandidates = ownerActivity.filter( (entry) => entry.slug !== slug && entry.createdAt >= now - QUALITY_WINDOW_MS && entry.latestVersionId, - ) - let similarRecentCount = 0 + ); + let similarRecentCount = 0; for (const entry of recentCandidates) { const version = (await ctx.runQuery(internal.skills.getVersionByIdInternal, { - versionId: entry.latestVersionId as Id<'skillVersions'>, - })) as Doc<'skillVersions'> | null - if (!version) continue + versionId: entry.latestVersionId as Id<"skillVersions">, + })) as Doc<"skillVersions"> | null; + if (!version) continue; const candidateReadmeFile = version.files.find((file) => { - const lower = file.path.toLowerCase() - return lower === 'skill.md' || lower === 'skills.md' - }) - if (!candidateReadmeFile) continue - const candidateText = await fetchText(ctx, candidateReadmeFile.storageId) + const lower = file.path.toLowerCase(); + return lower === "skill.md" || lower === "skills.md"; + }); + if (!candidateReadmeFile) continue; + const candidateText = await fetchText(ctx, candidateReadmeFile.storageId); if (toStructuralFingerprint(candidateText) === qualitySignals.structuralFingerprint) { - similarRecentCount += 1 + similarRecentCount += 1; } } @@ 
-195,52 +196,66 @@ export async function publishVersionForUser( signals: qualitySignals, trustTier, similarRecentCount, - }) - if (qualityAssessment.decision === 'reject') { - throw new ConvexError(qualityAssessment.reason) + }); + if (qualityAssessment.decision === "reject") { + throw new ConvexError(qualityAssessment.reason); } } - const metadata = mergeSourceIntoMetadata(frontmatterMetadata, args.source, qualityAssessment) + const metadata = mergeSourceIntoMetadata(frontmatterMetadata, args.source, qualityAssessment); - const otherFiles = [] as Array<{ path: string; content: string }> + const fileContents: Array<{ path: string; content: string }> = [ + { path: readmeFile.path, content: readmeText }, + ]; for (const file of safeFiles) { - if (!file.path || file.path.toLowerCase().endsWith('.md')) continue - if (!isTextFile(file.path, file.contentType ?? undefined)) continue - const content = await fetchText(ctx, file.storageId) - otherFiles.push({ path: file.path, content }) - if (otherFiles.length >= MAX_FILES_FOR_EMBEDDING) break + if (!file.path || file.storageId === readmeFile.storageId) continue; + if (!isTextFile(file.path, file.contentType ?? undefined)) continue; + const content = await fetchText(ctx, file.storageId); + fileContents.push({ path: file.path, content }); } + const otherFiles = fileContents + .filter((file) => !file.path.toLowerCase().endsWith(".md")) + .slice(0, MAX_FILES_FOR_EMBEDDING); + + const staticScan = runStaticModerationScan({ + slug, + displayName, + summary, + frontmatter, + metadata, + files: safeFiles.map((file) => ({ path: file.path, size: file.size })), + fileContents, + }); const embeddingText = buildEmbeddingText({ frontmatter, readme: readmeText, otherFiles, - }) + }); const fingerprintPromise = hashSkillFiles( safeFiles.map((file) => ({ path: file.path, sha256: file.sha256 })), - ) + ); const changelogPromise = - changelogSource === 'user' + changelogSource === "user" ? 
Promise.resolve(suppliedChangelog) : generateChangelogForPublish(ctx, { slug, version, readmeText, files: safeFiles.map((file) => ({ path: file.path, sha256: file.sha256 })), - }) + }); - const embeddingPromise = generateEmbedding(embeddingText) + const embeddingPromise = generateEmbedding(embeddingText); const [fingerprint, changelogText, embedding] = await Promise.all([ fingerprintPromise, changelogPromise, embeddingPromise.catch((error) => { - throw new ConvexError(formatEmbeddingError(error)) + throw new ConvexError(formatEmbeddingError(error)); }), - ]) + ]); const publishResult = (await ctx.runMutation(internal.skills.insertVersion, { userId, @@ -279,17 +294,18 @@ export async function publishVersionForUser( signals: qualityAssessment.signals, } : undefined, - })) as PublishResult + staticScan, + })) as PublishResult; await ctx.scheduler.runAfter(0, internal.vt.scanWithVirusTotal, { versionId: publishResult.versionId, - }) + }); await ctx.scheduler.runAfter(0, internal.llmEval.evaluateWithLlm, { versionId: publishResult.versionId, - }) + }); - const ownerHandle = owner?.handle ?? owner?.displayName ?? owner?.name ?? 'unknown' + const ownerHandle = owner?.handle ?? owner?.displayName ?? owner?.name ?? 
"unknown"; if (!options.skipBackup) { void ctx.scheduler @@ -302,8 +318,8 @@ export async function publishVersionForUser( publishedAt: Date.now(), }) .catch((error) => { - console.error('GitHub backup scheduling failed', error) - }) + console.error("GitHub backup scheduling failed", error); + }); } if (!options.skipWebhook) { @@ -311,21 +327,21 @@ export async function publishVersionForUser( slug, version, displayName, - }) + }); } - return publishResult + return publishResult; } function mergeSourceIntoMetadata( metadata: unknown, - source: PublishVersionArgs['source'], + source: PublishVersionArgs["source"], qualityAssessment: QualityAssessment | null = null, ) { const base = - metadata && typeof metadata === 'object' && !Array.isArray(metadata) + metadata && typeof metadata === "object" && !Array.isArray(metadata) ? { ...(metadata as Record) } - : {} + : {}; if (source) { base.source = { @@ -336,7 +352,7 @@ function mergeSourceIntoMetadata( commit: source.commit, path: source.path, importedAt: source.importedAt, - } + }; } if (qualityAssessment) { @@ -348,10 +364,10 @@ function mergeSourceIntoMetadata( signals: qualityAssessment.signals, reason: qualityAssessment.reason, evaluatedAt: Date.now(), - } + }; } - return Object.keys(base).length ? base : undefined + return Object.keys(base).length ? base : undefined; } export const __test = { @@ -359,15 +375,15 @@ export const __test = { computeQualitySignals, evaluateQuality, toStructuralFingerprint, -} +}; -export async function queueHighlightedWebhook(ctx: MutationCtx, skillId: Id<'skills'>) { - const skill = await ctx.db.get(skillId) - if (!skill) return - const owner = await ctx.db.get(skill.ownerUserId) - const latestVersion = skill.latestVersionId ? 
await ctx.db.get(skill.latestVersionId) : null +export async function queueHighlightedWebhook(ctx: MutationCtx, skillId: Id<"skills">) { + const skill = await ctx.db.get(skillId); + if (!skill) return; + const owner = await ctx.db.get(skill.ownerUserId); + const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null; - const badges = await getSkillBadgeMap(ctx, skillId) + const badges = await getSkillBadgeMap(ctx, skillId); const payload: WebhookSkillPayload = { slug: skill.slug, displayName: skill.displayName, @@ -376,33 +392,33 @@ export async function queueHighlightedWebhook(ctx: MutationCtx, skillId: Id<'ski ownerHandle: owner?.handle ?? owner?.name ?? undefined, highlighted: isSkillHighlighted({ badges }), tags: Object.keys(skill.tags ?? {}), - } + }; await ctx.scheduler.runAfter(0, internal.webhooks.sendDiscordWebhook, { - event: 'skill.highlighted', + event: "skill.highlighted", skill: payload, - }) + }); } export async function fetchText( - ctx: { storage: { get: (id: Id<'_storage'>) => Promise } }, - storageId: Id<'_storage'>, + ctx: { storage: { get: (id: Id<"_storage">) => Promise } }, + storageId: Id<"_storage">, ) { - const blob = await ctx.storage.get(storageId) - if (!blob) throw new Error('File missing in storage') - return blob.text() + const blob = await ctx.storage.get(storageId); + if (!blob) throw new Error("File missing in storage"); + return blob.text(); } function formatEmbeddingError(error: unknown) { if (error instanceof Error) { - if (error.message.includes('OPENAI_API_KEY')) { - return 'OPENAI_API_KEY is not configured.' + if (error.message.includes("OPENAI_API_KEY")) { + return "OPENAI_API_KEY is not configured."; } - if (error.message.startsWith('Embedding failed')) { - return error.message + if (error.message.startsWith("Embedding failed")) { + return error.message; } } - return 'Embedding failed. Please try again.' + return "Embedding failed. 
Please try again."; } async function schedulePublishWebhook( @@ -411,8 +427,8 @@ async function schedulePublishWebhook( ) { const result = (await ctx.runQuery(api.skills.getBySlug, { slug: params.slug, - })) as { skill: Doc<'skills'>; owner: PublicUser | null } | null - if (!result?.skill) return + })) as { skill: Doc<"skills">; owner: PublicUser | null } | null; + if (!result?.skill) return; const payload: WebhookSkillPayload = { slug: result.skill.slug, @@ -422,10 +438,10 @@ async function schedulePublishWebhook( ownerHandle: result.owner?.handle ?? result.owner?.name ?? undefined, highlighted: isSkillHighlighted(result.skill), tags: Object.keys(result.skill.tags ?? {}), - } + }; await ctx.scheduler.runAfter(0, internal.webhooks.sendDiscordWebhook, { - event: 'skill.publish', + event: "skill.publish", skill: payload, - }) + }); } diff --git a/convex/lib/skillSafety.ts b/convex/lib/skillSafety.ts index a0d56e8f2..ecc8de8ae 100644 --- a/convex/lib/skillSafety.ts +++ b/convex/lib/skillSafety.ts @@ -1,13 +1,18 @@ -import type { Doc } from '../_generated/dataModel' +import type { Doc } from "../_generated/dataModel"; +import { resolveSkillVerdict } from "./moderationEngine"; function isScannerSuspiciousReason(reason: string | undefined) { - if (!reason) return false - return reason.startsWith('scanner.') && reason.endsWith('.suspicious') + if (!reason) return false; + return reason.startsWith("scanner.") && reason.endsWith(".suspicious"); } export function isSkillSuspicious( - skill: Pick, 'moderationFlags' | 'moderationReason'>, + skill: Pick< + Doc<"skills">, + "moderationVerdict" | "moderationReasonCodes" | "moderationFlags" | "moderationReason" + >, ) { - if (skill.moderationFlags?.includes('flagged.suspicious')) return true - return isScannerSuspiciousReason(skill.moderationReason) + if (resolveSkillVerdict(skill) === "suspicious") return true; + if (skill.moderationFlags?.includes("flagged.suspicious")) return true; + return 
isScannerSuspiciousReason(skill.moderationReason); } diff --git a/convex/schema.ts b/convex/schema.ts index d86de7286..8d96123b4 100644 --- a/convex/schema.ts +++ b/convex/schema.ts @@ -1,7 +1,7 @@ -import { authTables } from '@convex-dev/auth/server' -import { defineSchema, defineTable } from 'convex/server' -import { v } from 'convex/values' -import { EMBEDDING_DIMENSIONS } from './lib/embeddings' +import { authTables } from "@convex-dev/auth/server"; +import { defineSchema, defineTable } from "convex/server"; +import { v } from "convex/values"; +import { EMBEDDING_DIMENSIONS } from "./lib/embeddings"; const users = defineTable({ name: v.optional(v.string()), @@ -14,7 +14,7 @@ const users = defineTable({ handle: v.optional(v.string()), displayName: v.optional(v.string()), bio: v.optional(v.string()), - role: v.optional(v.union(v.literal('admin'), v.literal('moderator'), v.literal('user'))), + role: v.optional(v.union(v.literal("admin"), v.literal("moderator"), v.literal("user"))), githubCreatedAt: v.optional(v.number()), githubFetchedAt: v.optional(v.number()), githubProfileSyncedAt: v.optional(v.number()), @@ -26,66 +26,86 @@ const users = defineTable({ createdAt: v.optional(v.number()), updatedAt: v.optional(v.number()), }) - .index('email', ['email']) - .index('phone', ['phone']) - .index('handle', ['handle']) + .index("email", ["email"]) + .index("phone", ["phone"]) + .index("handle", ["handle"]); const skills = defineTable({ slug: v.string(), displayName: v.string(), summary: v.optional(v.string()), resourceId: v.optional(v.string()), - ownerUserId: v.id('users'), - canonicalSkillId: v.optional(v.id('skills')), + ownerUserId: v.id("users"), + canonicalSkillId: v.optional(v.id("skills")), forkOf: v.optional( v.object({ - skillId: v.id('skills'), - kind: v.union(v.literal('fork'), v.literal('duplicate')), + skillId: v.id("skills"), + kind: v.union(v.literal("fork"), v.literal("duplicate")), version: v.optional(v.string()), at: v.number(), }), ), - 
latestVersionId: v.optional(v.id('skillVersions')), - tags: v.record(v.string(), v.id('skillVersions')), + latestVersionId: v.optional(v.id("skillVersions")), + tags: v.record(v.string(), v.id("skillVersions")), softDeletedAt: v.optional(v.number()), badges: v.optional( v.object({ redactionApproved: v.optional( v.object({ - byUserId: v.id('users'), + byUserId: v.id("users"), at: v.number(), }), ), highlighted: v.optional( v.object({ - byUserId: v.id('users'), + byUserId: v.id("users"), at: v.number(), }), ), official: v.optional( v.object({ - byUserId: v.id('users'), + byUserId: v.id("users"), at: v.number(), }), ), deprecated: v.optional( v.object({ - byUserId: v.id('users'), + byUserId: v.id("users"), at: v.number(), }), ), }), ), moderationStatus: v.optional( - v.union(v.literal('active'), v.literal('hidden'), v.literal('removed')), + v.union(v.literal("active"), v.literal("hidden"), v.literal("removed")), ), moderationNotes: v.optional(v.string()), moderationReason: v.optional(v.string()), + moderationVerdict: v.optional( + v.union(v.literal("clean"), v.literal("suspicious"), v.literal("malicious")), + ), + moderationReasonCodes: v.optional(v.array(v.string())), + moderationEvidence: v.optional( + v.array( + v.object({ + code: v.string(), + severity: v.union(v.literal("info"), v.literal("warn"), v.literal("critical")), + file: v.string(), + line: v.number(), + message: v.string(), + evidence: v.string(), + }), + ), + ), + moderationSummary: v.optional(v.string()), + moderationEngineVersion: v.optional(v.string()), + moderationEvaluatedAt: v.optional(v.number()), + moderationSourceVersionId: v.optional(v.id("skillVersions")), quality: v.optional( v.object({ score: v.number(), - decision: v.union(v.literal('pass'), v.literal('quarantine'), v.literal('reject')), - trustTier: v.union(v.literal('low'), v.literal('medium'), v.literal('trusted')), + decision: v.union(v.literal("pass"), v.literal("quarantine"), v.literal("reject")), + trustTier: 
v.union(v.literal("low"), v.literal("medium"), v.literal("trusted")), similarRecentCount: v.number(), reason: v.string(), signals: v.object({ @@ -107,7 +127,7 @@ const skills = defineTable({ scanLastCheckedAt: v.optional(v.number()), scanCheckCount: v.optional(v.number()), hiddenAt: v.optional(v.number()), - hiddenBy: v.optional(v.id('users')), + hiddenBy: v.optional(v.id("users")), reportCount: v.optional(v.number()), lastReportedAt: v.optional(v.number()), batch: v.optional(v.string()), @@ -126,34 +146,34 @@ const skills = defineTable({ createdAt: v.number(), updatedAt: v.number(), }) - .index('by_slug', ['slug']) - .index('by_owner', ['ownerUserId']) - .index('by_updated', ['updatedAt']) - .index('by_stats_downloads', ['statsDownloads', 'updatedAt']) - .index('by_stats_stars', ['statsStars', 'updatedAt']) - .index('by_stats_installs_current', ['statsInstallsCurrent', 'updatedAt']) - .index('by_stats_installs_all_time', ['statsInstallsAllTime', 'updatedAt']) - .index('by_batch', ['batch']) - .index('by_active_updated', ['softDeletedAt', 'updatedAt']) - .index('by_active_created', ['softDeletedAt', 'createdAt']) - .index('by_active_name', ['softDeletedAt', 'displayName']) - .index('by_active_stats_downloads', ['softDeletedAt', 'statsDownloads', 'updatedAt']) - .index('by_active_stats_stars', ['softDeletedAt', 'statsStars', 'updatedAt']) - .index('by_active_stats_installs_all_time', [ - 'softDeletedAt', - 'statsInstallsAllTime', - 'updatedAt', + .index("by_slug", ["slug"]) + .index("by_owner", ["ownerUserId"]) + .index("by_updated", ["updatedAt"]) + .index("by_stats_downloads", ["statsDownloads", "updatedAt"]) + .index("by_stats_stars", ["statsStars", "updatedAt"]) + .index("by_stats_installs_current", ["statsInstallsCurrent", "updatedAt"]) + .index("by_stats_installs_all_time", ["statsInstallsAllTime", "updatedAt"]) + .index("by_batch", ["batch"]) + .index("by_active_updated", ["softDeletedAt", "updatedAt"]) + .index("by_active_created", ["softDeletedAt", 
"createdAt"]) + .index("by_active_name", ["softDeletedAt", "displayName"]) + .index("by_active_stats_downloads", ["softDeletedAt", "statsDownloads", "updatedAt"]) + .index("by_active_stats_stars", ["softDeletedAt", "statsStars", "updatedAt"]) + .index("by_active_stats_installs_all_time", [ + "softDeletedAt", + "statsInstallsAllTime", + "updatedAt", ]) - .index('by_canonical', ['canonicalSkillId']) - .index('by_fork_of', ['forkOf.skillId']) + .index("by_canonical", ["canonicalSkillId"]) + .index("by_fork_of", ["forkOf.skillId"]); const souls = defineTable({ slug: v.string(), displayName: v.string(), summary: v.optional(v.string()), - ownerUserId: v.id('users'), - latestVersionId: v.optional(v.id('soulVersions')), - tags: v.record(v.string(), v.id('soulVersions')), + ownerUserId: v.id("users"), + latestVersionId: v.optional(v.id("soulVersions")), + tags: v.record(v.string(), v.id("soulVersions")), softDeletedAt: v.optional(v.number()), stats: v.object({ downloads: v.number(), @@ -164,21 +184,21 @@ const souls = defineTable({ createdAt: v.number(), updatedAt: v.number(), }) - .index('by_slug', ['slug']) - .index('by_owner', ['ownerUserId']) - .index('by_updated', ['updatedAt']) + .index("by_slug", ["slug"]) + .index("by_owner", ["ownerUserId"]) + .index("by_updated", ["updatedAt"]); const skillVersions = defineTable({ - skillId: v.id('skills'), + skillId: v.id("skills"), version: v.string(), fingerprint: v.optional(v.string()), changelog: v.string(), - changelogSource: v.optional(v.union(v.literal('auto'), v.literal('user'))), + changelogSource: v.optional(v.union(v.literal("auto"), v.literal("user"))), files: v.array( v.object({ path: v.string(), size: v.number(), - storageId: v.id('_storage'), + storageId: v.id("_storage"), sha256: v.string(), contentType: v.optional(v.string()), }), @@ -189,7 +209,7 @@ const skillVersions = defineTable({ clawdis: v.optional(v.any()), moltbot: v.optional(v.any()), }), - createdBy: v.id('users'), + createdBy: v.id("users"), 
createdAt: v.number(), softDeletedAt: v.optional(v.number()), sha256hash: v.optional(v.string()), @@ -224,22 +244,41 @@ const skillVersions = defineTable({ checkedAt: v.number(), }), ), + staticScan: v.optional( + v.object({ + status: v.union(v.literal("clean"), v.literal("suspicious"), v.literal("malicious")), + reasonCodes: v.array(v.string()), + findings: v.array( + v.object({ + code: v.string(), + severity: v.union(v.literal("info"), v.literal("warn"), v.literal("critical")), + file: v.string(), + line: v.number(), + message: v.string(), + evidence: v.string(), + }), + ), + summary: v.string(), + engineVersion: v.string(), + checkedAt: v.number(), + }), + ), }) - .index('by_skill', ['skillId']) - .index('by_skill_version', ['skillId', 'version']) - .index('by_sha256hash', ['sha256hash']) + .index("by_skill", ["skillId"]) + .index("by_skill_version", ["skillId", "version"]) + .index("by_sha256hash", ["sha256hash"]); const soulVersions = defineTable({ - soulId: v.id('souls'), + soulId: v.id("souls"), version: v.string(), fingerprint: v.optional(v.string()), changelog: v.string(), - changelogSource: v.optional(v.union(v.literal('auto'), v.literal('user'))), + changelogSource: v.optional(v.union(v.literal("auto"), v.literal("user"))), files: v.array( v.object({ path: v.string(), size: v.number(), - storageId: v.id('_storage'), + storageId: v.id("_storage"), sha256: v.string(), contentType: v.optional(v.string()), }), @@ -250,75 +289,75 @@ const soulVersions = defineTable({ clawdis: v.optional(v.any()), moltbot: v.optional(v.any()), }), - createdBy: v.id('users'), + createdBy: v.id("users"), createdAt: v.number(), softDeletedAt: v.optional(v.number()), }) - .index('by_soul', ['soulId']) - .index('by_soul_version', ['soulId', 'version']) + .index("by_soul", ["soulId"]) + .index("by_soul_version", ["soulId", "version"]); const skillVersionFingerprints = defineTable({ - skillId: v.id('skills'), - versionId: v.id('skillVersions'), + skillId: v.id("skills"), + versionId: 
v.id("skillVersions"), fingerprint: v.string(), createdAt: v.number(), }) - .index('by_version', ['versionId']) - .index('by_fingerprint', ['fingerprint']) - .index('by_skill_fingerprint', ['skillId', 'fingerprint']) + .index("by_version", ["versionId"]) + .index("by_fingerprint", ["fingerprint"]) + .index("by_skill_fingerprint", ["skillId", "fingerprint"]); const skillBadges = defineTable({ - skillId: v.id('skills'), + skillId: v.id("skills"), kind: v.union( - v.literal('highlighted'), - v.literal('official'), - v.literal('deprecated'), - v.literal('redactionApproved'), + v.literal("highlighted"), + v.literal("official"), + v.literal("deprecated"), + v.literal("redactionApproved"), ), - byUserId: v.id('users'), + byUserId: v.id("users"), at: v.number(), }) - .index('by_skill', ['skillId']) - .index('by_skill_kind', ['skillId', 'kind']) - .index('by_kind_at', ['kind', 'at']) + .index("by_skill", ["skillId"]) + .index("by_skill_kind", ["skillId", "kind"]) + .index("by_kind_at", ["kind", "at"]); const soulVersionFingerprints = defineTable({ - soulId: v.id('souls'), - versionId: v.id('soulVersions'), + soulId: v.id("souls"), + versionId: v.id("soulVersions"), fingerprint: v.string(), createdAt: v.number(), }) - .index('by_version', ['versionId']) - .index('by_fingerprint', ['fingerprint']) - .index('by_soul_fingerprint', ['soulId', 'fingerprint']) + .index("by_version", ["versionId"]) + .index("by_fingerprint", ["fingerprint"]) + .index("by_soul_fingerprint", ["soulId", "fingerprint"]); const skillEmbeddings = defineTable({ - skillId: v.id('skills'), - versionId: v.id('skillVersions'), - ownerId: v.id('users'), + skillId: v.id("skills"), + versionId: v.id("skillVersions"), + ownerId: v.id("users"), embedding: v.array(v.number()), isLatest: v.boolean(), isApproved: v.boolean(), visibility: v.string(), updatedAt: v.number(), }) - .index('by_skill', ['skillId']) - .index('by_version', ['versionId']) - .vectorIndex('by_embedding', { - vectorField: 'embedding', + 
.index("by_skill", ["skillId"]) + .index("by_version", ["versionId"]) + .vectorIndex("by_embedding", { + vectorField: "embedding", dimensions: EMBEDDING_DIMENSIONS, - filterFields: ['visibility'], - }) + filterFields: ["visibility"], + }); const skillDailyStats = defineTable({ - skillId: v.id('skills'), + skillId: v.id("skills"), day: v.number(), downloads: v.number(), installs: v.number(), updatedAt: v.number(), }) - .index('by_skill_day', ['skillId', 'day']) - .index('by_day', ['day']) + .index("by_skill_day", ["skillId", "day"]) + .index("by_day", ["day"]); const skillLeaderboards = defineTable({ kind: v.string(), @@ -327,33 +366,33 @@ const skillLeaderboards = defineTable({ rangeEndDay: v.number(), items: v.array( v.object({ - skillId: v.id('skills'), + skillId: v.id("skills"), score: v.number(), installs: v.number(), downloads: v.number(), }), ), -}).index('by_kind', ['kind', 'generatedAt']) +}).index("by_kind", ["kind", "generatedAt"]); const skillStatBackfillState = defineTable({ key: v.string(), cursor: v.optional(v.string()), doneAt: v.optional(v.number()), updatedAt: v.number(), -}).index('by_key', ['key']) +}).index("by_key", ["key"]); const skillStatEvents = defineTable({ - skillId: v.id('skills'), + skillId: v.id("skills"), kind: v.union( - v.literal('download'), - v.literal('star'), - v.literal('unstar'), - v.literal('comment'), - v.literal('uncomment'), - v.literal('install_new'), - v.literal('install_reactivate'), - v.literal('install_deactivate'), - v.literal('install_clear'), + v.literal("download"), + v.literal("star"), + v.literal("unstar"), + v.literal("comment"), + v.literal("uncomment"), + v.literal("install_new"), + v.literal("install_reactivate"), + v.literal("install_deactivate"), + v.literal("install_clear"), ), delta: v.optional( v.object({ @@ -364,97 +403,97 @@ const skillStatEvents = defineTable({ occurredAt: v.number(), processedAt: v.optional(v.number()), }) - .index('by_unprocessed', ['processedAt']) - .index('by_skill', 
['skillId']) + .index("by_unprocessed", ["processedAt"]) + .index("by_skill", ["skillId"]); const skillStatUpdateCursors = defineTable({ key: v.string(), cursorCreationTime: v.optional(v.number()), updatedAt: v.number(), -}).index('by_key', ['key']) +}).index("by_key", ["key"]); const soulEmbeddings = defineTable({ - soulId: v.id('souls'), - versionId: v.id('soulVersions'), - ownerId: v.id('users'), + soulId: v.id("souls"), + versionId: v.id("soulVersions"), + ownerId: v.id("users"), embedding: v.array(v.number()), isLatest: v.boolean(), isApproved: v.boolean(), visibility: v.string(), updatedAt: v.number(), }) - .index('by_soul', ['soulId']) - .index('by_version', ['versionId']) - .vectorIndex('by_embedding', { - vectorField: 'embedding', + .index("by_soul", ["soulId"]) + .index("by_version", ["versionId"]) + .vectorIndex("by_embedding", { + vectorField: "embedding", dimensions: EMBEDDING_DIMENSIONS, - filterFields: ['visibility'], - }) + filterFields: ["visibility"], + }); const comments = defineTable({ - skillId: v.id('skills'), - userId: v.id('users'), + skillId: v.id("skills"), + userId: v.id("users"), body: v.string(), createdAt: v.number(), softDeletedAt: v.optional(v.number()), - deletedBy: v.optional(v.id('users')), + deletedBy: v.optional(v.id("users")), }) - .index('by_skill', ['skillId']) - .index('by_user', ['userId']) + .index("by_skill", ["skillId"]) + .index("by_user", ["userId"]); const skillReports = defineTable({ - skillId: v.id('skills'), - userId: v.id('users'), + skillId: v.id("skills"), + userId: v.id("users"), reason: v.optional(v.string()), createdAt: v.number(), }) - .index('by_skill', ['skillId']) - .index('by_skill_createdAt', ['skillId', 'createdAt']) - .index('by_user', ['userId']) - .index('by_skill_user', ['skillId', 'userId']) + .index("by_skill", ["skillId"]) + .index("by_skill_createdAt", ["skillId", "createdAt"]) + .index("by_user", ["userId"]) + .index("by_skill_user", ["skillId", "userId"]); const soulComments = defineTable({ - 
soulId: v.id('souls'), - userId: v.id('users'), + soulId: v.id("souls"), + userId: v.id("users"), body: v.string(), createdAt: v.number(), softDeletedAt: v.optional(v.number()), - deletedBy: v.optional(v.id('users')), + deletedBy: v.optional(v.id("users")), }) - .index('by_soul', ['soulId']) - .index('by_user', ['userId']) + .index("by_soul", ["soulId"]) + .index("by_user", ["userId"]); const stars = defineTable({ - skillId: v.id('skills'), - userId: v.id('users'), + skillId: v.id("skills"), + userId: v.id("users"), createdAt: v.number(), }) - .index('by_skill', ['skillId']) - .index('by_user', ['userId']) - .index('by_skill_user', ['skillId', 'userId']) + .index("by_skill", ["skillId"]) + .index("by_user", ["userId"]) + .index("by_skill_user", ["skillId", "userId"]); const soulStars = defineTable({ - soulId: v.id('souls'), - userId: v.id('users'), + soulId: v.id("souls"), + userId: v.id("users"), createdAt: v.number(), }) - .index('by_soul', ['soulId']) - .index('by_user', ['userId']) - .index('by_soul_user', ['soulId', 'userId']) + .index("by_soul", ["soulId"]) + .index("by_user", ["userId"]) + .index("by_soul_user", ["soulId", "userId"]); const auditLogs = defineTable({ - actorUserId: v.id('users'), + actorUserId: v.id("users"), action: v.string(), targetType: v.string(), targetId: v.string(), metadata: v.optional(v.any()), createdAt: v.number(), }) - .index('by_actor', ['actorUserId']) - .index('by_target', ['targetType', 'targetId']) + .index("by_actor", ["actorUserId"]) + .index("by_target", ["targetType", "targetId"]); const vtScanLogs = defineTable({ - type: v.union(v.literal('daily_rescan'), v.literal('backfill'), v.literal('pending_poll')), + type: v.union(v.literal("daily_rescan"), v.literal("backfill"), v.literal("pending_poll")), total: v.number(), updated: v.number(), unchanged: v.number(), @@ -469,10 +508,10 @@ const vtScanLogs = defineTable({ ), durationMs: v.number(), createdAt: v.number(), -}).index('by_type_date', ['type', 'createdAt']) 
+}).index("by_type_date", ["type", "createdAt"]); const apiTokens = defineTable({ - userId: v.id('users'), + userId: v.id("users"), label: v.string(), prefix: v.string(), tokenHash: v.string(), @@ -480,8 +519,8 @@ const apiTokens = defineTable({ lastUsedAt: v.optional(v.number()), revokedAt: v.optional(v.number()), }) - .index('by_user', ['userId']) - .index('by_hash', ['tokenHash']) + .index("by_user", ["userId"]) + .index("by_hash", ["tokenHash"]); const rateLimits = defineTable({ key: v.string(), @@ -490,74 +529,74 @@ const rateLimits = defineTable({ limit: v.number(), updatedAt: v.number(), }) - .index('by_key_window', ['key', 'windowStart']) - .index('by_key', ['key']) + .index("by_key_window", ["key", "windowStart"]) + .index("by_key", ["key"]); const downloadDedupes = defineTable({ - skillId: v.id('skills'), + skillId: v.id("skills"), identityHash: v.string(), hourStart: v.number(), createdAt: v.number(), }) - .index('by_skill_identity_hour', ['skillId', 'identityHash', 'hourStart']) - .index('by_hour', ['hourStart']) + .index("by_skill_identity_hour", ["skillId", "identityHash", "hourStart"]) + .index("by_hour", ["hourStart"]); const reservedSlugs = defineTable({ slug: v.string(), - originalOwnerUserId: v.id('users'), + originalOwnerUserId: v.id("users"), deletedAt: v.number(), expiresAt: v.number(), reason: v.optional(v.string()), releasedAt: v.optional(v.number()), }) - .index('by_slug', ['slug']) - .index('by_slug_active_deletedAt', ['slug', 'releasedAt', 'deletedAt']) - .index('by_owner', ['originalOwnerUserId']) - .index('by_expiry', ['expiresAt']) + .index("by_slug", ["slug"]) + .index("by_slug_active_deletedAt", ["slug", "releasedAt", "deletedAt"]) + .index("by_owner", ["originalOwnerUserId"]) + .index("by_expiry", ["expiresAt"]); const githubBackupSyncState = defineTable({ key: v.string(), cursor: v.optional(v.string()), updatedAt: v.number(), -}).index('by_key', ['key']) +}).index("by_key", ["key"]); const userSyncRoots = defineTable({ - userId: 
v.id('users'), + userId: v.id("users"), rootId: v.string(), label: v.string(), firstSeenAt: v.number(), lastSeenAt: v.number(), expiredAt: v.optional(v.number()), }) - .index('by_user', ['userId']) - .index('by_user_root', ['userId', 'rootId']) + .index("by_user", ["userId"]) + .index("by_user_root", ["userId", "rootId"]); const userSkillInstalls = defineTable({ - userId: v.id('users'), - skillId: v.id('skills'), + userId: v.id("users"), + skillId: v.id("skills"), firstSeenAt: v.number(), lastSeenAt: v.number(), activeRoots: v.number(), lastVersion: v.optional(v.string()), }) - .index('by_user', ['userId']) - .index('by_user_skill', ['userId', 'skillId']) - .index('by_skill', ['skillId']) + .index("by_user", ["userId"]) + .index("by_user_skill", ["userId", "skillId"]) + .index("by_skill", ["skillId"]); const userSkillRootInstalls = defineTable({ - userId: v.id('users'), + userId: v.id("users"), rootId: v.string(), - skillId: v.id('skills'), + skillId: v.id("skills"), firstSeenAt: v.number(), lastSeenAt: v.number(), lastVersion: v.optional(v.string()), removedAt: v.optional(v.number()), }) - .index('by_user', ['userId']) - .index('by_user_root', ['userId', 'rootId']) - .index('by_user_root_skill', ['userId', 'rootId', 'skillId']) - .index('by_user_skill', ['userId', 'skillId']) - .index('by_skill', ['skillId']) + .index("by_user", ["userId"]) + .index("by_user_root", ["userId", "rootId"]) + .index("by_user_root_skill", ["userId", "rootId", "skillId"]) + .index("by_user_skill", ["userId", "skillId"]) + .index("by_skill", ["skillId"]); export default defineSchema({ ...authTables, @@ -591,4 +630,4 @@ export default defineSchema({ userSyncRoots, userSkillInstalls, userSkillRootInstalls, -}) +}); diff --git a/convex/skills.ts b/convex/skills.ts index efebe3589..887c29665 100644 --- a/convex/skills.ts +++ b/convex/skills.ts @@ -1,9 +1,9 @@ -import { getAuthUserId } from '@convex-dev/auth/server' -import { paginationOptsValidator } from 'convex/server' -import { 
ConvexError, v } from 'convex/values' -import { internal } from './_generated/api' -import type { Doc, Id } from './_generated/dataModel' -import type { MutationCtx, QueryCtx } from './_generated/server' +import { getAuthUserId } from "@convex-dev/auth/server"; +import { paginationOptsValidator } from "convex/server"; +import { ConvexError, v } from "convex/values"; +import type { Doc, Id } from "./_generated/dataModel"; +import type { MutationCtx, QueryCtx } from "./_generated/server"; +import { internal } from "./_generated/api"; import { action, internalAction, @@ -11,541 +11,550 @@ import { internalQuery, mutation, query, -} from './_generated/server' -import { assertAdmin, assertModerator, requireUser, requireUserFromAction } from './lib/access' +} from "./_generated/server"; +import { assertAdmin, assertModerator, requireUser, requireUserFromAction } from "./lib/access"; import { getSkillBadgeMap, getSkillBadgeMaps, isSkillHighlighted, type SkillBadgeMap, -} from './lib/badges' -import { generateChangelogPreview as buildChangelogPreview } from './lib/changelog' +} from "./lib/badges"; +import { scheduleNextBatchIfNeeded } from "./lib/batching"; +import { generateChangelogPreview as buildChangelogPreview } from "./lib/changelog"; +import { embeddingVisibilityFor } from "./lib/embeddingVisibility"; import { canHealSkillOwnershipByGitHubProviderAccountId, getGitHubProviderAccountId, -} from './lib/githubIdentity' -import { buildTrendingLeaderboard } from './lib/leaderboards' -import { deriveModerationFlags } from './lib/moderation' -import { toPublicSkill, toPublicUser } from './lib/public' -import { embeddingVisibilityFor } from './lib/embeddingVisibility' -import { scheduleNextBatchIfNeeded } from './lib/batching' +} from "./lib/githubIdentity"; +import { buildTrendingLeaderboard } from "./lib/leaderboards"; +import { deriveModerationFlags } from "./lib/moderation"; +import { buildModerationSnapshot, resolveSkillVerdict } from "./lib/moderationEngine"; +import 
{ + legacyFlagsFromVerdict, + summarizeReasonCodes, + type ModerationFinding, + verdictFromCodes, +} from "./lib/moderationReasonCodes"; +import { toPublicSkill, toPublicUser } from "./lib/public"; import { enforceReservedSlugCooldownForNewSkill, getLatestActiveReservedSlug, reserveSlugForHardDeleteFinalize, upsertReservedSlugForRightfulOwner, -} from './lib/reservedSlugs' +} from "./lib/reservedSlugs"; import { fetchText, type PublishResult, publishVersionForUser, queueHighlightedWebhook, -} from './lib/skillPublish' -import { isSkillSuspicious } from './lib/skillSafety' -import { getFrontmatterValue, hashSkillFiles } from './lib/skills' - -export { publishVersionForUser } from './lib/skillPublish' - -type ReadmeResult = { path: string; text: string } -type FileTextResult = { path: string; text: string; size: number; sha256: string } - -const MAX_DIFF_FILE_BYTES = 200 * 1024 -const MAX_LIST_LIMIT = 50 -const MAX_PUBLIC_LIST_LIMIT = 200 -const MAX_LIST_BULK_LIMIT = 200 -const MAX_LIST_TAKE = 1000 -const MAX_BADGE_LOOKUP_SKILLS = 200 -const HARD_DELETE_BATCH_SIZE = 100 -const HARD_DELETE_VERSION_BATCH_SIZE = 10 -const HARD_DELETE_LEADERBOARD_BATCH_SIZE = 25 -const BAN_USER_SKILLS_BATCH_SIZE = 25 -const MAX_ACTIVE_REPORTS_PER_USER = 20 -const AUTO_HIDE_REPORT_THRESHOLD = 3 -const MAX_REPORT_REASON_SAMPLE = 5 -const RATE_LIMIT_HOUR_MS = 60 * 60 * 1000 -const RATE_LIMIT_DAY_MS = 24 * RATE_LIMIT_HOUR_MS -const SLUG_RESERVATION_DAYS = 90 -const SLUG_RESERVATION_MS = SLUG_RESERVATION_DAYS * RATE_LIMIT_DAY_MS -const LOW_TRUST_ACCOUNT_AGE_MS = 30 * RATE_LIMIT_DAY_MS -const TRUSTED_PUBLISHER_SKILL_THRESHOLD = 10 -const LOW_TRUST_BURST_THRESHOLD_PER_HOUR = 8 -const OWNER_ACTIVITY_SCAN_LIMIT = 500 +} from "./lib/skillPublish"; +import { getFrontmatterValue, hashSkillFiles } from "./lib/skills"; +import { isSkillSuspicious } from "./lib/skillSafety"; + +export { publishVersionForUser } from "./lib/skillPublish"; + +type ReadmeResult = { path: string; text: string }; +type 
FileTextResult = { path: string; text: string; size: number; sha256: string }; + +const MAX_DIFF_FILE_BYTES = 200 * 1024; +const MAX_LIST_LIMIT = 50; +const MAX_PUBLIC_LIST_LIMIT = 200; +const MAX_LIST_BULK_LIMIT = 200; +const MAX_LIST_TAKE = 1000; +const MAX_BADGE_LOOKUP_SKILLS = 200; +const HARD_DELETE_BATCH_SIZE = 100; +const HARD_DELETE_VERSION_BATCH_SIZE = 10; +const HARD_DELETE_LEADERBOARD_BATCH_SIZE = 25; +const BAN_USER_SKILLS_BATCH_SIZE = 25; +const MAX_ACTIVE_REPORTS_PER_USER = 20; +const AUTO_HIDE_REPORT_THRESHOLD = 3; +const MAX_REPORT_REASON_SAMPLE = 5; +const RATE_LIMIT_HOUR_MS = 60 * 60 * 1000; +const RATE_LIMIT_DAY_MS = 24 * RATE_LIMIT_HOUR_MS; +const SLUG_RESERVATION_DAYS = 90; +const SLUG_RESERVATION_MS = SLUG_RESERVATION_DAYS * RATE_LIMIT_DAY_MS; +const LOW_TRUST_ACCOUNT_AGE_MS = 30 * RATE_LIMIT_DAY_MS; +const TRUSTED_PUBLISHER_SKILL_THRESHOLD = 10; +const LOW_TRUST_BURST_THRESHOLD_PER_HOUR = 8; +const OWNER_ACTIVITY_SCAN_LIMIT = 500; const NEW_SKILL_RATE_LIMITS = { lowTrust: { perHour: 5, perDay: 20 }, trusted: { perHour: 20, perDay: 80 }, -} as const +} as const; const SORT_INDEXES = { - newest: 'by_active_created', - updated: 'by_active_updated', - name: 'by_active_name', - downloads: 'by_active_stats_downloads', - stars: 'by_active_stats_stars', - installs: 'by_active_stats_installs_all_time', -} as const + newest: "by_active_created", + updated: "by_active_updated", + name: "by_active_name", + downloads: "by_active_stats_downloads", + stars: "by_active_stats_stars", + installs: "by_active_stats_installs_all_time", +} as const; function isSkillVersionId( - value: Id<'skillVersions'> | null | undefined, -): value is Id<'skillVersions'> { - return typeof value === 'string' && value.startsWith('skillVersions:') + value: Id<"skillVersions"> | null | undefined, +): value is Id<"skillVersions"> { + return typeof value === "string" && value.startsWith("skillVersions:"); } -function isUserId(value: Id<'users'> | null | undefined): value is 
Id<'users'> { - return typeof value === 'string' && value.startsWith('users:') +function isUserId(value: Id<"users"> | null | undefined): value is Id<"users"> { + return typeof value === "string" && value.startsWith("users:"); } type OwnerTrustSignals = { - isLowTrust: boolean - skillsLastHour: number - skillsLastDay: number -} - -function isPrivilegedOwnerForSuspiciousBypass(owner: Doc<'users'> | null | undefined) { - if (!owner) return false - return owner.role === 'admin' || owner.role === 'moderator' + isLowTrust: boolean; + skillsLastHour: number; + skillsLastDay: number; +}; + +function isPrivilegedOwnerForSuspiciousBypass(owner: Doc<"users"> | null | undefined) { + if (!owner) return false; + return owner.role === "admin" || owner.role === "moderator"; } function stripSuspiciousFlag(flags: string[] | undefined) { - if (!flags?.length) return undefined - const next = flags.filter((flag) => flag !== 'flagged.suspicious') - return next.length ? next : undefined + if (!flags?.length) return undefined; + const next = flags.filter((flag) => flag !== "flagged.suspicious"); + return next.length ? 
next : undefined; } function normalizeScannerSuspiciousReason(reason: string | undefined) { - if (!reason) return reason - if (!reason.startsWith('scanner.') || !reason.endsWith('.suspicious')) return reason - return `${reason.slice(0, -'.suspicious'.length)}.clean` + if (!reason) return reason; + if (!reason.startsWith("scanner.") || !reason.endsWith(".suspicious")) return reason; + return `${reason.slice(0, -".suspicious".length)}.clean`; } async function getOwnerTrustSignals( ctx: QueryCtx | MutationCtx, - owner: Doc<'users'>, + owner: Doc<"users">, now: number, ): Promise { const ownerSkills = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', owner._id)) - .order('desc') - .take(OWNER_ACTIVITY_SCAN_LIMIT) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", owner._id)) + .order("desc") + .take(OWNER_ACTIVITY_SCAN_LIMIT); - const hourThreshold = now - RATE_LIMIT_HOUR_MS - const dayThreshold = now - RATE_LIMIT_DAY_MS - let skillsLastHour = 0 - let skillsLastDay = 0 + const hourThreshold = now - RATE_LIMIT_HOUR_MS; + const dayThreshold = now - RATE_LIMIT_DAY_MS; + let skillsLastHour = 0; + let skillsLastDay = 0; for (const skill of ownerSkills) { if (skill.createdAt >= dayThreshold) { - skillsLastDay += 1 + skillsLastDay += 1; if (skill.createdAt >= hourThreshold) { - skillsLastHour += 1 + skillsLastHour += 1; } } } - const accountCreatedAt = owner.createdAt ?? owner._creationTime - const accountAgeMs = Math.max(0, now - accountCreatedAt) + const accountCreatedAt = owner.createdAt ?? 
owner._creationTime; + const accountAgeMs = Math.max(0, now - accountCreatedAt); const isLowTrust = accountAgeMs < LOW_TRUST_ACCOUNT_AGE_MS || ownerSkills.length < TRUSTED_PUBLISHER_SKILL_THRESHOLD || - skillsLastHour >= LOW_TRUST_BURST_THRESHOLD_PER_HOUR + skillsLastHour >= LOW_TRUST_BURST_THRESHOLD_PER_HOUR; - return { isLowTrust, skillsLastHour, skillsLastDay } + return { isLowTrust, skillsLastHour, skillsLastDay }; } function enforceNewSkillRateLimit(signals: OwnerTrustSignals) { - const limits = signals.isLowTrust ? NEW_SKILL_RATE_LIMITS.lowTrust : NEW_SKILL_RATE_LIMITS.trusted + const limits = signals.isLowTrust + ? NEW_SKILL_RATE_LIMITS.lowTrust + : NEW_SKILL_RATE_LIMITS.trusted; if (signals.skillsLastHour >= limits.perHour) { throw new ConvexError( `Rate limit: max ${limits.perHour} new skills per hour. Please wait before publishing more.`, - ) + ); } if (signals.skillsLastDay >= limits.perDay) { throw new ConvexError( `Rate limit: max ${limits.perDay} new skills per 24 hours. Please wait before publishing more.`, - ) + ); } } const HARD_DELETE_PHASES = [ - 'versions', - 'fingerprints', - 'embeddings', - 'comments', - 'reports', - 'stars', - 'badges', - 'dailyStats', - 'statEvents', - 'installs', - 'rootInstalls', - 'leaderboards', - 'canonical', - 'forks', - 'finalize', -] as const - -type HardDeletePhase = (typeof HARD_DELETE_PHASES)[number] + "versions", + "fingerprints", + "embeddings", + "comments", + "reports", + "stars", + "badges", + "dailyStats", + "statEvents", + "installs", + "rootInstalls", + "leaderboards", + "canonical", + "forks", + "finalize", +] as const; + +type HardDeletePhase = (typeof HARD_DELETE_PHASES)[number]; function isHardDeletePhase(value: string | undefined): value is HardDeletePhase { - if (!value) return false - return (HARD_DELETE_PHASES as readonly string[]).includes(value) + if (!value) return false; + return (HARD_DELETE_PHASES as readonly string[]).includes(value); } async function scheduleHardDelete( ctx: MutationCtx, - 
skillId: Id<'skills'>, - actorUserId: Id<'users'>, + skillId: Id<"skills">, + actorUserId: Id<"users">, phase: HardDeletePhase, ) { await ctx.scheduler.runAfter(0, internal.skills.hardDeleteInternal, { skillId, actorUserId, phase, - }) + }); } async function hardDeleteSkillStep( ctx: MutationCtx, - skill: Doc<'skills'>, - actorUserId: Id<'users'>, + skill: Doc<"skills">, + actorUserId: Id<"users">, phase: HardDeletePhase, ) { - const now = Date.now() - const patch: Partial> = {} - if (!skill.softDeletedAt) patch.softDeletedAt = now - if (skill.moderationStatus !== 'removed') patch.moderationStatus = 'removed' - if (!skill.hiddenAt) patch.hiddenAt = now - if (!skill.hiddenBy) patch.hiddenBy = actorUserId + const now = Date.now(); + const patch: Partial> = {}; + if (!skill.softDeletedAt) patch.softDeletedAt = now; + if (skill.moderationStatus !== "removed") patch.moderationStatus = "removed"; + if (!skill.hiddenAt) patch.hiddenAt = now; + if (!skill.hiddenBy) patch.hiddenBy = actorUserId; if (Object.keys(patch).length) { - patch.lastReviewedAt = now - patch.updatedAt = now - await ctx.db.patch(skill._id, patch) + patch.lastReviewedAt = now; + patch.updatedAt = now; + await ctx.db.patch(skill._id, patch); } switch (phase) { - case 'versions': { + case "versions": { const versions = await ctx.db - .query('skillVersions') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_VERSION_BATCH_SIZE) + .query("skillVersions") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_VERSION_BATCH_SIZE); for (const version of versions) { - await ctx.db.delete(version._id) + await ctx.db.delete(version._id); } if (versions.length === HARD_DELETE_VERSION_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'versions') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "versions"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'fingerprints') - return + await 
scheduleHardDelete(ctx, skill._id, actorUserId, "fingerprints"); + return; } - case 'fingerprints': { + case "fingerprints": { const fingerprints = await ctx.db - .query('skillVersionFingerprints') - .withIndex('by_skill_fingerprint', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillVersionFingerprints") + .withIndex("by_skill_fingerprint", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const fingerprint of fingerprints) { - await ctx.db.delete(fingerprint._id) + await ctx.db.delete(fingerprint._id); } if (fingerprints.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'fingerprints') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "fingerprints"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'embeddings') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "embeddings"); + return; } - case 'embeddings': { + case "embeddings": { const embeddings = await ctx.db - .query('skillEmbeddings') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillEmbeddings") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const embedding of embeddings) { - await ctx.db.delete(embedding._id) + await ctx.db.delete(embedding._id); } if (embeddings.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'embeddings') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "embeddings"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'comments') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "comments"); + return; } - case 'comments': { + case "comments": { const comments = await ctx.db - .query('comments') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("comments") + .withIndex("by_skill", (q) => 
q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const comment of comments) { - await ctx.db.delete(comment._id) + await ctx.db.delete(comment._id); } if (comments.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'comments') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "comments"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'reports') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "reports"); + return; } - case 'reports': { + case "reports": { const reports = await ctx.db - .query('skillReports') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillReports") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const report of reports) { - await ctx.db.delete(report._id) + await ctx.db.delete(report._id); } if (reports.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'reports') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "reports"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'stars') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "stars"); + return; } - case 'stars': { + case "stars": { const stars = await ctx.db - .query('stars') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("stars") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const star of stars) { - await ctx.db.delete(star._id) + await ctx.db.delete(star._id); } if (stars.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'stars') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "stars"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'badges') - return + await scheduleHardDelete(ctx, skill._id, 
actorUserId, "badges"); + return; } - case 'badges': { + case "badges": { const badges = await ctx.db - .query('skillBadges') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillBadges") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const badge of badges) { - await ctx.db.delete(badge._id) + await ctx.db.delete(badge._id); } if (badges.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'badges') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "badges"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'dailyStats') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "dailyStats"); + return; } - case 'dailyStats': { + case "dailyStats": { const dailyStats = await ctx.db - .query('skillDailyStats') - .withIndex('by_skill_day', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillDailyStats") + .withIndex("by_skill_day", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const stat of dailyStats) { - await ctx.db.delete(stat._id) + await ctx.db.delete(stat._id); } if (dailyStats.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'dailyStats') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "dailyStats"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'statEvents') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "statEvents"); + return; } - case 'statEvents': { + case "statEvents": { const statEvents = await ctx.db - .query('skillStatEvents') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skillStatEvents") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const statEvent of statEvents) { - await ctx.db.delete(statEvent._id) + 
await ctx.db.delete(statEvent._id); } if (statEvents.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'statEvents') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "statEvents"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'installs') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "installs"); + return; } - case 'installs': { + case "installs": { const installs = await ctx.db - .query('userSkillInstalls') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("userSkillInstalls") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const install of installs) { - await ctx.db.delete(install._id) + await ctx.db.delete(install._id); } if (installs.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'installs') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "installs"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'rootInstalls') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "rootInstalls"); + return; } - case 'rootInstalls': { + case "rootInstalls": { const rootInstalls = await ctx.db - .query('userSkillRootInstalls') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("userSkillRootInstalls") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const rootInstall of rootInstalls) { - await ctx.db.delete(rootInstall._id) + await ctx.db.delete(rootInstall._id); } if (rootInstalls.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'rootInstalls') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "rootInstalls"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'leaderboards') - return + await 
scheduleHardDelete(ctx, skill._id, actorUserId, "leaderboards"); + return; } - case 'leaderboards': { + case "leaderboards": { const leaderboards = await ctx.db - .query('skillLeaderboards') - .take(HARD_DELETE_LEADERBOARD_BATCH_SIZE) + .query("skillLeaderboards") + .take(HARD_DELETE_LEADERBOARD_BATCH_SIZE); for (const leaderboard of leaderboards) { - const items = leaderboard.items.filter((item) => item.skillId !== skill._id) + const items = leaderboard.items.filter((item) => item.skillId !== skill._id); if (items.length !== leaderboard.items.length) { - await ctx.db.patch(leaderboard._id, { items }) + await ctx.db.patch(leaderboard._id, { items }); } } if (leaderboards.length === HARD_DELETE_LEADERBOARD_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'leaderboards') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "leaderboards"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'canonical') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "canonical"); + return; } - case 'canonical': { + case "canonical": { const canonicalRefs = await ctx.db - .query('skills') - .withIndex('by_canonical', (q) => q.eq('canonicalSkillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skills") + .withIndex("by_canonical", (q) => q.eq("canonicalSkillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const related of canonicalRefs) { await ctx.db.patch(related._id, { canonicalSkillId: undefined, updatedAt: now, - }) + }); } if (canonicalRefs.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'canonical') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "canonical"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'forks') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "forks"); + return; } - case 'forks': { + case "forks": { const forkRefs = await ctx.db - .query('skills') - .withIndex('by_fork_of', (q) => 
q.eq('forkOf.skillId', skill._id)) - .take(HARD_DELETE_BATCH_SIZE) + .query("skills") + .withIndex("by_fork_of", (q) => q.eq("forkOf.skillId", skill._id)) + .take(HARD_DELETE_BATCH_SIZE); for (const related of forkRefs) { await ctx.db.patch(related._id, { forkOf: undefined, updatedAt: now, - }) + }); } if (forkRefs.length === HARD_DELETE_BATCH_SIZE) { - await scheduleHardDelete(ctx, skill._id, actorUserId, 'forks') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "forks"); + return; } - await scheduleHardDelete(ctx, skill._id, actorUserId, 'finalize') - return + await scheduleHardDelete(ctx, skill._id, actorUserId, "finalize"); + return; } - case 'finalize': { + case "finalize": { await reserveSlugForHardDeleteFinalize(ctx, { slug: skill.slug, originalOwnerUserId: skill.ownerUserId, deletedAt: now, expiresAt: now + SLUG_RESERVATION_MS, - }) + }); - await ctx.db.delete(skill._id) - await ctx.db.insert('auditLogs', { + await ctx.db.delete(skill._id); + await ctx.db.insert("auditLogs", { actorUserId, - action: 'skill.hard_delete', - targetType: 'skill', + action: "skill.hard_delete", + targetType: "skill", targetId: skill._id, metadata: { slug: skill.slug }, createdAt: now, - }) - return + }); + return; } } } type PublicSkillEntry = { - skill: NonNullable> - latestVersion: PublicSkillListVersion | null - ownerHandle: string | null - owner: ReturnType | null -} + skill: NonNullable>; + latestVersion: PublicSkillListVersion | null; + ownerHandle: string | null; + owner: ReturnType | null; +}; type PublicSkillListVersion = Pick< - Doc<'skillVersions'>, - '_id' | '_creationTime' | 'version' | 'createdAt' | 'changelog' | 'changelogSource' + Doc<"skillVersions">, + "_id" | "_creationTime" | "version" | "createdAt" | "changelog" | "changelogSource" > & { parsed?: { clawdis?: { nix?: { - plugin?: boolean - } - } - } -} + plugin?: boolean; + }; + }; + }; +}; type ManagementSkillEntry = { - skill: Doc<'skills'> - latestVersion: Doc<'skillVersions'> | null - 
owner: Doc<'users'> | null -} + skill: Doc<"skills">; + latestVersion: Doc<"skillVersions"> | null; + owner: Doc<"users"> | null; +}; -type BadgeKind = Doc<'skillBadges'>['kind'] +type BadgeKind = Doc<"skillBadges">["kind"]; -async function buildPublicSkillEntries(ctx: QueryCtx, skills: Doc<'skills'>[]) { +async function buildPublicSkillEntries(ctx: QueryCtx, skills: Doc<"skills">[]) { const ownerInfoCache = new Map< - Id<'users'>, + Id<"users">, Promise<{ ownerHandle: string | null; owner: ReturnType | null }> - >() - const badgeMapBySkillId: Map, SkillBadgeMap> = skills.length <= + >(); + const badgeMapBySkillId: Map, SkillBadgeMap> = skills.length <= MAX_BADGE_LOOKUP_SKILLS ? await getSkillBadgeMaps( ctx, skills.map((skill) => skill._id), ) - : new Map() + : new Map(); - const getOwnerInfo = (ownerUserId: Id<'users'>) => { - const cached = ownerInfoCache.get(ownerUserId) - if (cached) return cached + const getOwnerInfo = (ownerUserId: Id<"users">) => { + const cached = ownerInfoCache.get(ownerUserId); + if (cached) return cached; const ownerPromise = ctx.db.get(ownerUserId).then((ownerDoc) => { if (!ownerDoc || ownerDoc.deletedAt || ownerDoc.deactivatedAt) { - return { ownerHandle: null, owner: null } + return { ownerHandle: null, owner: null }; } return { ownerHandle: ownerDoc.handle ?? (ownerDoc._id ? String(ownerDoc._id) : null), owner: toPublicUser(ownerDoc), - } - }) - ownerInfoCache.set(ownerUserId, ownerPromise) - return ownerPromise - } + }; + }); + ownerInfoCache.set(ownerUserId, ownerPromise); + return ownerPromise; + }; const entries = await Promise.all( skills.map(async (skill) => { const [latestVersionDoc, ownerInfo] = await Promise.all([ skill.latestVersionId ? ctx.db.get(skill.latestVersionId) : null, getOwnerInfo(skill.ownerUserId), - ]) - const badges = badgeMapBySkillId.get(skill._id) ?? 
{} - const publicSkill = toPublicSkill({ ...skill, badges }) - if (!publicSkill) return null - const latestVersion = toPublicSkillListVersion(latestVersionDoc) + ]); + const badges = badgeMapBySkillId.get(skill._id) ?? {}; + const publicSkill = toPublicSkill({ ...skill, badges }); + if (!publicSkill) return null; + const latestVersion = toPublicSkillListVersion(latestVersionDoc); return { skill: publicSkill, latestVersion, ownerHandle: ownerInfo.ownerHandle, owner: ownerInfo.owner, - } + }; }), - ) + ); - return entries.filter((entry): entry is PublicSkillEntry => entry !== null) + return entries.filter((entry): entry is PublicSkillEntry => entry !== null); } function toPublicSkillListVersion( - version: Doc<'skillVersions'> | null, + version: Doc<"skillVersions"> | null, ): PublicSkillListVersion | null { - if (!version) return null + if (!version) return null; return { _id: version._id, _creationTime: version._creationTime, @@ -554,95 +563,95 @@ function toPublicSkillListVersion( changelog: version.changelog, changelogSource: version.changelogSource, parsed: version.parsed?.clawdis ? 
{ clawdis: version.parsed.clawdis } : undefined, - } + }; } -async function buildManagementSkillEntries(ctx: QueryCtx, skills: Doc<'skills'>[]) { - const ownerCache = new Map, Promise | null>>() +async function buildManagementSkillEntries(ctx: QueryCtx, skills: Doc<"skills">[]) { + const ownerCache = new Map, Promise | null>>(); const badgeMapBySkillId = await getSkillBadgeMaps( ctx, skills.map((skill) => skill._id), - ) - - const getOwner = (ownerUserId: Id<'users'>) => { - const cached = ownerCache.get(ownerUserId) - if (cached) return cached - const ownerPromise = ctx.db.get(ownerUserId) - ownerCache.set(ownerUserId, ownerPromise) - return ownerPromise - } + ); + + const getOwner = (ownerUserId: Id<"users">) => { + const cached = ownerCache.get(ownerUserId); + if (cached) return cached; + const ownerPromise = ctx.db.get(ownerUserId); + ownerCache.set(ownerUserId, ownerPromise); + return ownerPromise; + }; return Promise.all( skills.map(async (skill) => { const [latestVersion, owner] = await Promise.all([ skill.latestVersionId ? ctx.db.get(skill.latestVersionId) : null, getOwner(skill.ownerUserId), - ]) - const badges = badgeMapBySkillId.get(skill._id) ?? {} - return { skill: { ...skill, badges }, latestVersion, owner } + ]); + const badges = badgeMapBySkillId.get(skill._id) ?? {}; + return { skill: { ...skill, badges }, latestVersion, owner }; }), - ) satisfies Promise + ) satisfies Promise; } -async function attachBadgesToSkills(ctx: QueryCtx, skills: Doc<'skills'>[]) { +async function attachBadgesToSkills(ctx: QueryCtx, skills: Doc<"skills">[]) { const badgeMapBySkillId = await getSkillBadgeMaps( ctx, skills.map((skill) => skill._id), - ) + ); return skills.map((skill) => ({ ...skill, badges: badgeMapBySkillId.get(skill._id) ?? 
{}, - })) + })); } async function loadHighlightedSkills(ctx: QueryCtx, limit: number) { const entries = await ctx.db - .query('skillBadges') - .withIndex('by_kind_at', (q) => q.eq('kind', 'highlighted')) - .order('desc') - .take(MAX_LIST_TAKE) + .query("skillBadges") + .withIndex("by_kind_at", (q) => q.eq("kind", "highlighted")) + .order("desc") + .take(MAX_LIST_TAKE); - const skills: Doc<'skills'>[] = [] + const skills: Doc<"skills">[] = []; for (const badge of entries) { - const skill = await ctx.db.get(badge.skillId) - if (!skill || skill.softDeletedAt) continue - skills.push(skill) - if (skills.length >= limit) break + const skill = await ctx.db.get(badge.skillId); + if (!skill || skill.softDeletedAt) continue; + skills.push(skill); + if (skills.length >= limit) break; } - return skills + return skills; } async function upsertSkillBadge( ctx: MutationCtx, - skillId: Id<'skills'>, + skillId: Id<"skills">, kind: BadgeKind, - userId: Id<'users'>, + userId: Id<"users">, at: number, ) { const existing = await ctx.db - .query('skillBadges') - .withIndex('by_skill_kind', (q) => q.eq('skillId', skillId).eq('kind', kind)) - .unique() + .query("skillBadges") + .withIndex("by_skill_kind", (q) => q.eq("skillId", skillId).eq("kind", kind)) + .unique(); if (existing) { - await ctx.db.patch(existing._id, { byUserId: userId, at }) - return existing._id + await ctx.db.patch(existing._id, { byUserId: userId, at }); + return existing._id; } - return ctx.db.insert('skillBadges', { + return ctx.db.insert("skillBadges", { skillId, kind, byUserId: userId, at, - }) + }); } -async function removeSkillBadge(ctx: MutationCtx, skillId: Id<'skills'>, kind: BadgeKind) { +async function removeSkillBadge(ctx: MutationCtx, skillId: Id<"skills">, kind: BadgeKind) { const existing = await ctx.db - .query('skillBadges') - .withIndex('by_skill_kind', (q) => q.eq('skillId', skillId).eq('kind', kind)) - .unique() + .query("skillBadges") + .withIndex("by_skill_kind", (q) => q.eq("skillId", 
skillId).eq("kind", kind)) + .unique(); if (existing) { - await ctx.db.delete(existing._id) + await ctx.db.delete(existing._id); } } @@ -650,37 +659,41 @@ export const getBySlug = query({ args: { slug: v.string() }, handler: async (ctx, args) => { const skill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', args.slug)) - .unique() - if (!skill || skill.softDeletedAt) return null + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", args.slug)) + .unique(); + if (!skill || skill.softDeletedAt) return null; - const userId = await getAuthUserId(ctx) - const isOwner = Boolean(userId && userId === skill.ownerUserId) + const userId = await getAuthUserId(ctx); + const isOwner = Boolean(userId && userId === skill.ownerUserId); - const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null - const owner = await ctx.db.get(skill.ownerUserId) - const badges = await getSkillBadgeMap(ctx, skill._id) + const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null; + const owner = await ctx.db.get(skill.ownerUserId); + const badges = await getSkillBadgeMap(ctx, skill._id); - const forkOfSkill = skill.forkOf?.skillId ? await ctx.db.get(skill.forkOf.skillId) : null - const forkOfOwner = forkOfSkill ? await ctx.db.get(forkOfSkill.ownerUserId) : null + const forkOfSkill = skill.forkOf?.skillId ? await ctx.db.get(skill.forkOf.skillId) : null; + const forkOfOwner = forkOfSkill ? await ctx.db.get(forkOfSkill.ownerUserId) : null; - const canonicalSkill = skill.canonicalSkillId ? await ctx.db.get(skill.canonicalSkillId) : null - const canonicalOwner = canonicalSkill ? await ctx.db.get(canonicalSkill.ownerUserId) : null + const canonicalSkill = skill.canonicalSkillId ? await ctx.db.get(skill.canonicalSkillId) : null; + const canonicalOwner = canonicalSkill ? 
await ctx.db.get(canonicalSkill.ownerUserId) : null; - const publicSkill = toPublicSkill({ ...skill, badges }) + const publicSkill = toPublicSkill({ ...skill, badges }); // Determine moderation state const isPendingScan = - skill.moderationStatus === 'hidden' && skill.moderationReason === 'pending.scan' - const isMalwareBlocked = skill.moderationFlags?.includes('blocked.malware') ?? false - const isSuspicious = skill.moderationFlags?.includes('flagged.suspicious') ?? false - const isHiddenByMod = skill.moderationStatus === 'hidden' && !isPendingScan && !isMalwareBlocked - const isRemoved = skill.moderationStatus === 'removed' + skill.moderationStatus === "hidden" && skill.moderationReason === "pending.scan"; + const verdict = resolveSkillVerdict(skill); + const isMalwareBlocked = + verdict === "malicious" || (skill.moderationFlags?.includes("blocked.malware") ?? false); + const isSuspicious = + verdict === "suspicious" || (skill.moderationFlags?.includes("flagged.suspicious") ?? false); + const isHiddenByMod = + skill.moderationStatus === "hidden" && !isPendingScan && !isMalwareBlocked; + const isRemoved = skill.moderationStatus === "removed"; // Non-owners can see malware-blocked skills (transparency), but not other hidden states // Owners can see all their moderated skills - if (!publicSkill && !isOwner && !isMalwareBlocked) return null + if (!publicSkill && !isOwner && !isMalwareBlocked) return null; // For owners viewing their moderated skill, construct the response manually const skillData = publicSkill ?? { @@ -698,10 +711,10 @@ export const getBySlug = query({ stats: skill.stats, createdAt: skill.createdAt, updatedAt: skill.updatedAt, - } + }; // Moderation info - visible to owners for all states, or anyone for flagged skills (transparency) - const showModerationInfo = isOwner || isMalwareBlocked || isSuspicious + const showModerationInfo = isOwner || isMalwareBlocked || isSuspicious; const moderationInfo = showModerationInfo ? 
{ isPendingScan, @@ -709,9 +722,14 @@ export const getBySlug = query({ isSuspicious, isHiddenByMod, isRemoved, + verdict, + reasonCodes: skill.moderationReasonCodes ?? [], + summary: skill.moderationSummary, + engineVersion: skill.moderationEngineVersion, + updatedAt: skill.moderationEvaluatedAt, reason: isOwner ? skill.moderationReason : undefined, } - : null + : null; return { skill: skillData, @@ -721,7 +739,7 @@ export const getBySlug = query({ moderationInfo, forkOf: forkOfSkill ? { - kind: skill.forkOf?.kind ?? 'fork', + kind: skill.forkOf?.kind ?? "fork", version: skill.forkOf?.version ?? null, skill: { slug: forkOfSkill.slug, @@ -745,31 +763,31 @@ export const getBySlug = query({ }, } : null, - } + }; }, -}) +}); export const getBySlugForStaff = query({ args: { slug: v.string() }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) + const { user } = await requireUser(ctx); + assertModerator(user); const skill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', args.slug)) - .unique() - if (!skill) return null + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", args.slug)) + .unique(); + if (!skill) return null; - const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null - const owner = toPublicUser(await ctx.db.get(skill.ownerUserId)) - const badges = await getSkillBadgeMap(ctx, skill._id) + const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null; + const owner = toPublicUser(await ctx.db.get(skill.ownerUserId)); + const badges = await getSkillBadgeMap(ctx, skill._id); - const forkOfSkill = skill.forkOf?.skillId ? await ctx.db.get(skill.forkOf.skillId) : null - const forkOfOwner = forkOfSkill ? await ctx.db.get(forkOfSkill.ownerUserId) : null + const forkOfSkill = skill.forkOf?.skillId ? await ctx.db.get(skill.forkOf.skillId) : null; + const forkOfOwner = forkOfSkill ? 
await ctx.db.get(forkOfSkill.ownerUserId) : null; - const canonicalSkill = skill.canonicalSkillId ? await ctx.db.get(skill.canonicalSkillId) : null - const canonicalOwner = canonicalSkill ? await ctx.db.get(canonicalSkill.ownerUserId) : null + const canonicalSkill = skill.canonicalSkillId ? await ctx.db.get(skill.canonicalSkillId) : null; + const canonicalOwner = canonicalSkill ? await ctx.db.get(canonicalSkill.ownerUserId) : null; return { skill: { ...skill, badges }, @@ -777,7 +795,7 @@ export const getBySlugForStaff = query({ owner, forkOf: forkOfSkill ? { - kind: skill.forkOf?.kind ?? 'fork', + kind: skill.forkOf?.kind ?? "fork", version: skill.forkOf?.version ?? null, skill: { slug: forkOfSkill.slug, @@ -801,99 +819,99 @@ export const getBySlugForStaff = query({ }, } : null, - } + }; }, -}) +}); export const getReservedSlugInternal = internalQuery({ args: { slug: v.string() }, handler: async (ctx, args) => { - return getLatestActiveReservedSlug(ctx, args.slug) + return getLatestActiveReservedSlug(ctx, args.slug); }, -}) +}); export const getSkillBySlugInternal = internalQuery({ args: { slug: v.string() }, handler: async (ctx, args) => { return ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', args.slug)) - .unique() + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", args.slug)) + .unique(); }, -}) +}); export const getOwnerSkillActivityInternal = internalQuery({ args: { - ownerUserId: v.id('users'), + ownerUserId: v.id("users"), limit: v.optional(v.number()), }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 60, 1, 500) + const limit = clampInt(args.limit ?? 
60, 1, 500); const skills = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', args.ownerUserId)) - .order('desc') - .take(limit) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", args.ownerUserId)) + .order("desc") + .take(limit); return skills.map((skill) => ({ slug: skill.slug, summary: skill.summary, createdAt: skill.createdAt, latestVersionId: skill.latestVersionId, - })) + })); }, -}) +}); export const clearOwnerSuspiciousFlagsInternal = internalMutation({ args: { - ownerUserId: v.id('users'), + ownerUserId: v.id("users"), limit: v.optional(v.number()), }, handler: async (ctx, args) => { - const owner = await ctx.db.get(args.ownerUserId) - if (!owner || owner.deletedAt || owner.deactivatedAt) throw new Error('Owner not found') + const owner = await ctx.db.get(args.ownerUserId); + if (!owner || owner.deletedAt || owner.deactivatedAt) throw new Error("Owner not found"); if (!isPrivilegedOwnerForSuspiciousBypass(owner)) { - return { inspected: 0, updated: 0, skipped: 'owner_not_privileged' as const } + return { inspected: 0, updated: 0, skipped: "owner_not_privileged" as const }; } - const limit = clampInt(args.limit ?? 500, 1, 5000) + const limit = clampInt(args.limit ?? 500, 1, 5000); const skills = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', args.ownerUserId)) - .order('desc') - .take(limit) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", args.ownerUserId)) + .order("desc") + .take(limit); - let updated = 0 - const now = Date.now() + let updated = 0; + const now = Date.now(); for (const skill of skills) { - const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? [] - const hasSuspiciousFlag = existingFlags.includes('flagged.suspicious') + const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? 
[]; + const hasSuspiciousFlag = existingFlags.includes("flagged.suspicious"); const hasSuspiciousReason = - skill.moderationReason?.startsWith('scanner.') && - skill.moderationReason.endsWith('.suspicious') - if (!hasSuspiciousFlag && !hasSuspiciousReason) continue + skill.moderationReason?.startsWith("scanner.") && + skill.moderationReason.endsWith(".suspicious"); + if (!hasSuspiciousFlag && !hasSuspiciousReason) continue; - const patch: Partial> = { updatedAt: now } - patch.moderationFlags = stripSuspiciousFlag(existingFlags) + const patch: Partial> = { updatedAt: now }; + patch.moderationFlags = stripSuspiciousFlag(existingFlags); if (hasSuspiciousReason) { - patch.moderationReason = normalizeScannerSuspiciousReason(skill.moderationReason) + patch.moderationReason = normalizeScannerSuspiciousReason(skill.moderationReason); } if ( - (skill.moderationStatus ?? 'active') === 'hidden' && + (skill.moderationStatus ?? "active") === "hidden" && hasSuspiciousReason && !skill.softDeletedAt ) { - patch.moderationStatus = 'active' + patch.moderationStatus = "active"; } - await ctx.db.patch(skill._id, patch) - updated += 1 + await ctx.db.patch(skill._id, patch); + updated += 1; } - return { inspected: skills.length, updated } + return { inspected: skills.length, updated }; }, -}) +}); /** * Get quick stats without loading versions (fast). @@ -901,24 +919,24 @@ export const clearOwnerSuspiciousFlagsInternal = internalMutation({ export const getQuickStatsInternal = internalQuery({ args: {}, handler: async (ctx) => { - const allSkills = await ctx.db.query('skills').collect() - const active = allSkills.filter((s) => !s.softDeletedAt) + const allSkills = await ctx.db.query("skills").collect(); + const active = allSkills.filter((s) => !s.softDeletedAt); - const byStatus: Record = {} - const byReason: Record = {} + const byStatus: Record = {}; + const byReason: Record = {}; for (const skill of active) { - const status = skill.moderationStatus ?? 
'active' - byStatus[status] = (byStatus[status] ?? 0) + 1 + const status = skill.moderationStatus ?? "active"; + byStatus[status] = (byStatus[status] ?? 0) + 1; if (skill.moderationReason) { - byReason[skill.moderationReason] = (byReason[skill.moderationReason] ?? 0) + 1 + byReason[skill.moderationReason] = (byReason[skill.moderationReason] ?? 0) + 1; } } - return { total: active.length, byStatus, byReason } + return { total: active.length, byStatus, byReason }; }, -}) +}); /** * Get aggregate stats for all skills (for social posts, dashboards, etc.) @@ -930,206 +948,206 @@ export const getQuickStatsInternal = internalQuery({ export const getStatsPageInternal = internalQuery({ args: { cursor: v.optional(v.number()) }, handler: async (ctx, args) => { - const PAGE_SIZE = 500 - const cursor = args.cursor ?? 0 + const PAGE_SIZE = 500; + const cursor = args.cursor ?? 0; const page = await ctx.db - .query('skills') - .filter((q) => q.gt(q.field('_creationTime'), cursor)) - .order('asc') - .take(PAGE_SIZE) + .query("skills") + .filter((q) => q.gt(q.field("_creationTime"), cursor)) + .order("asc") + .take(PAGE_SIZE); - let total = 0 - const byStatus: Record = {} - const byReason: Record = {} - const byFlags: Record = {} - const vtStats = { clean: 0, suspicious: 0, malicious: 0, pending: 0, noAnalysis: 0 } + let total = 0; + const byStatus: Record = {}; + const byReason: Record = {}; + const byFlags: Record = {}; + const vtStats = { clean: 0, suspicious: 0, malicious: 0, pending: 0, noAnalysis: 0 }; for (const skill of page) { - if (skill.softDeletedAt) continue - total++ + if (skill.softDeletedAt) continue; + total++; - const status = skill.moderationStatus ?? 'active' - byStatus[status] = (byStatus[status] ?? 0) + 1 + const status = skill.moderationStatus ?? "active"; + byStatus[status] = (byStatus[status] ?? 0) + 1; if (skill.moderationReason) { - byReason[skill.moderationReason] = (byReason[skill.moderationReason] ?? 
0) + 1 + byReason[skill.moderationReason] = (byReason[skill.moderationReason] ?? 0) + 1; } for (const flag of skill.moderationFlags ?? []) { - byFlags[flag] = (byFlags[flag] ?? 0) + 1 + byFlags[flag] = (byFlags[flag] ?? 0) + 1; } - if (status === 'active') { - const reason = skill.moderationReason + if (status === "active") { + const reason = skill.moderationReason; if (!reason) { - vtStats.noAnalysis++ - } else if (reason === 'scanner.vt.clean') { - vtStats.clean++ - } else if (reason === 'scanner.vt.malicious') { - vtStats.malicious++ - } else if (reason === 'scanner.vt.suspicious') { - vtStats.suspicious++ - } else if (reason === 'scanner.vt.pending' || reason === 'pending.scan') { - vtStats.pending++ - } else if (reason.startsWith('scanner.vt-rescan.')) { - const suffix = reason.slice('scanner.vt-rescan.'.length) - if (suffix === 'clean') vtStats.clean++ - else if (suffix === 'malicious') vtStats.malicious++ - else if (suffix === 'suspicious') vtStats.suspicious++ - else vtStats.pending++ + vtStats.noAnalysis++; + } else if (reason === "scanner.vt.clean") { + vtStats.clean++; + } else if (reason === "scanner.vt.malicious") { + vtStats.malicious++; + } else if (reason === "scanner.vt.suspicious") { + vtStats.suspicious++; + } else if (reason === "scanner.vt.pending" || reason === "pending.scan") { + vtStats.pending++; + } else if (reason.startsWith("scanner.vt-rescan.")) { + const suffix = reason.slice("scanner.vt-rescan.".length); + if (suffix === "clean") vtStats.clean++; + else if (suffix === "malicious") vtStats.malicious++; + else if (suffix === "suspicious") vtStats.suspicious++; + else vtStats.pending++; } else { - vtStats.noAnalysis++ + vtStats.noAnalysis++; } } } - const nextCursor = page.length > 0 ? page[page.length - 1]._creationTime : null - const done = page.length < PAGE_SIZE + const nextCursor = page.length > 0 ? 
page[page.length - 1]._creationTime : null; + const done = page.length < PAGE_SIZE; - return { total, byStatus, byReason, byFlags, vtStats, nextCursor, done } + return { total, byStatus, byReason, byFlags, vtStats, nextCursor, done }; }, -}) +}); export const getHighlightedCountInternal = internalQuery({ args: {}, handler: async (ctx) => { const badges = await ctx.db - .query('skillBadges') - .withIndex('by_kind_at', (q) => q.eq('kind', 'highlighted')) - .collect() - return badges.length + .query("skillBadges") + .withIndex("by_kind_at", (q) => q.eq("kind", "highlighted")) + .collect(); + return badges.length; }, -}) +}); /** * Get aggregate stats for all skills (for social posts, dashboards, etc.) * Uses an action to call paginated queries, avoiding the 16MB byte limit. */ type StatsResult = { - total: number - highlighted: number - byStatus: Record - byReason: Record - byFlags: Record + total: number; + highlighted: number; + byStatus: Record; + byReason: Record; + byFlags: Record; vtStats: { - clean: number - suspicious: number - malicious: number - pending: number - noAnalysis: number - } -} + clean: number; + suspicious: number; + malicious: number; + pending: number; + noAnalysis: number; + }; +}; export const getStatsInternal = internalAction({ args: {}, handler: async (ctx): Promise => { - let total = 0 - const byStatus: Record = {} - const byReason: Record = {} - const byFlags: Record = {} - const vtStats = { clean: 0, suspicious: 0, malicious: 0, pending: 0, noAnalysis: 0 } + let total = 0; + const byStatus: Record = {}; + const byReason: Record = {}; + const byFlags: Record = {}; + const vtStats = { clean: 0, suspicious: 0, malicious: 0, pending: 0, noAnalysis: 0 }; - let cursor: number | undefined - let done = false + let cursor: number | undefined; + let done = false; while (!done) { const page: { - total: number - byStatus: Record - byReason: Record - byFlags: Record + total: number; + byStatus: Record; + byReason: Record; + byFlags: Record; vtStats: 
{ - clean: number - suspicious: number - malicious: number - pending: number - noAnalysis: number - } - nextCursor: number | null - done: boolean - } = await ctx.runQuery(internal.skills.getStatsPageInternal, { cursor }) - - total += page.total + clean: number; + suspicious: number; + malicious: number; + pending: number; + noAnalysis: number; + }; + nextCursor: number | null; + done: boolean; + } = await ctx.runQuery(internal.skills.getStatsPageInternal, { cursor }); + + total += page.total; for (const [k, cnt] of Object.entries(page.byStatus)) { - byStatus[k] = (byStatus[k] ?? 0) + cnt + byStatus[k] = (byStatus[k] ?? 0) + cnt; } for (const [k, cnt] of Object.entries(page.byReason)) { - byReason[k] = (byReason[k] ?? 0) + cnt + byReason[k] = (byReason[k] ?? 0) + cnt; } for (const [k, cnt] of Object.entries(page.byFlags)) { - byFlags[k] = (byFlags[k] ?? 0) + cnt + byFlags[k] = (byFlags[k] ?? 0) + cnt; } - vtStats.clean += page.vtStats.clean - vtStats.suspicious += page.vtStats.suspicious - vtStats.malicious += page.vtStats.malicious - vtStats.pending += page.vtStats.pending - vtStats.noAnalysis += page.vtStats.noAnalysis + vtStats.clean += page.vtStats.clean; + vtStats.suspicious += page.vtStats.suspicious; + vtStats.malicious += page.vtStats.malicious; + vtStats.pending += page.vtStats.pending; + vtStats.noAnalysis += page.vtStats.noAnalysis; - done = page.done + done = page.done; if (page.nextCursor !== null) { - cursor = page.nextCursor + cursor = page.nextCursor; } } - const highlighted: number = await ctx.runQuery(internal.skills.getHighlightedCountInternal, {}) + const highlighted: number = await ctx.runQuery(internal.skills.getHighlightedCountInternal, {}); - return { total, highlighted, byStatus, byReason, byFlags, vtStats } + return { total, highlighted, byStatus, byReason, byFlags, vtStats }; }, -}) +}); export const list = query({ args: { batch: v.optional(v.string()), - ownerUserId: v.optional(v.id('users')), + ownerUserId: v.optional(v.id("users")), 
limit: v.optional(v.number()), }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 24, 1, MAX_LIST_BULK_LIMIT) - const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE) + const limit = clampInt(args.limit ?? 24, 1, MAX_LIST_BULK_LIMIT); + const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE); if (args.batch) { - if (args.batch === 'highlighted') { - const skills = await loadHighlightedSkills(ctx, limit) - const withBadges = await attachBadgesToSkills(ctx, skills) + if (args.batch === "highlighted") { + const skills = await loadHighlightedSkills(ctx, limit); + const withBadges = await attachBadgesToSkills(ctx, skills); return withBadges .map((skill) => toPublicSkill(skill)) - .filter((skill): skill is NonNullable => Boolean(skill)) + .filter((skill): skill is NonNullable => Boolean(skill)); } const entries = await ctx.db - .query('skills') - .withIndex('by_batch', (q) => q.eq('batch', args.batch)) - .order('desc') - .take(takeLimit) - const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, limit) - const withBadges = await attachBadgesToSkills(ctx, filtered) + .query("skills") + .withIndex("by_batch", (q) => q.eq("batch", args.batch)) + .order("desc") + .take(takeLimit); + const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, limit); + const withBadges = await attachBadgesToSkills(ctx, filtered); return withBadges .map((skill) => toPublicSkill(skill)) - .filter((skill): skill is NonNullable => Boolean(skill)) + .filter((skill): skill is NonNullable => Boolean(skill)); } - const ownerUserId = args.ownerUserId + const ownerUserId = args.ownerUserId; if (ownerUserId) { - const userId = await getAuthUserId(ctx) - const isOwnDashboard = Boolean(userId && userId === ownerUserId) + const userId = await getAuthUserId(ctx); + const isOwnDashboard = Boolean(userId && userId === ownerUserId); const entries = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', ownerUserId)) - .order('desc') 
- .take(takeLimit) - const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, limit) - const withBadges = await attachBadgesToSkills(ctx, filtered) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", ownerUserId)) + .order("desc") + .take(takeLimit); + const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, limit); + const withBadges = await attachBadgesToSkills(ctx, filtered); if (isOwnDashboard) { // For owner's own dashboard, include pending skills return withBadges .map((skill) => { - const publicSkill = toPublicSkill(skill) - if (publicSkill) return publicSkill + const publicSkill = toPublicSkill(skill); + if (publicSkill) return publicSkill; // Include pending skills for owner const isPending = - skill.moderationStatus === 'hidden' && skill.moderationReason === 'pending.scan' + skill.moderationStatus === "hidden" && skill.moderationReason === "pending.scan"; if (isPending) { // Use computed badges from attachBadgesToSkills, not stored skill.badges - const { badges } = skill + const { badges } = skill; return { _id: skill._id, _creationTime: skill._creationTime, @@ -1146,91 +1164,91 @@ export const list = query({ createdAt: skill.createdAt, updatedAt: skill.updatedAt, pendingReview: true as const, - } + }; } - return null + return null; }) - .filter((skill): skill is NonNullable => Boolean(skill)) + .filter((skill): skill is NonNullable => Boolean(skill)); } return withBadges .map((skill) => toPublicSkill(skill)) - .filter((skill): skill is NonNullable => Boolean(skill)) + .filter((skill): skill is NonNullable => Boolean(skill)); } - const entries = await ctx.db.query('skills').order('desc').take(takeLimit) - const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, limit) - const withBadges = await attachBadgesToSkills(ctx, filtered) + const entries = await ctx.db.query("skills").order("desc").take(takeLimit); + const filtered = entries.filter((skill) => !skill.softDeletedAt).slice(0, 
limit); + const withBadges = await attachBadgesToSkills(ctx, filtered); return withBadges .map((skill) => toPublicSkill(skill)) - .filter((skill): skill is NonNullable => Boolean(skill)) + .filter((skill): skill is NonNullable => Boolean(skill)); }, -}) +}); export const listWithLatest = query({ args: { batch: v.optional(v.string()), - ownerUserId: v.optional(v.id('users')), + ownerUserId: v.optional(v.id("users")), limit: v.optional(v.number()), }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 24, 1, MAX_LIST_BULK_LIMIT) - const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE) - let entries: Doc<'skills'>[] = [] + const limit = clampInt(args.limit ?? 24, 1, MAX_LIST_BULK_LIMIT); + const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE); + let entries: Doc<"skills">[] = []; if (args.batch) { - if (args.batch === 'highlighted') { - entries = await loadHighlightedSkills(ctx, limit) + if (args.batch === "highlighted") { + entries = await loadHighlightedSkills(ctx, limit); } else { entries = await ctx.db - .query('skills') - .withIndex('by_batch', (q) => q.eq('batch', args.batch)) - .order('desc') - .take(takeLimit) + .query("skills") + .withIndex("by_batch", (q) => q.eq("batch", args.batch)) + .order("desc") + .take(takeLimit); } } else if (args.ownerUserId) { - const ownerUserId = args.ownerUserId + const ownerUserId = args.ownerUserId; entries = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', ownerUserId)) - .order('desc') - .take(takeLimit) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", ownerUserId)) + .order("desc") + .take(takeLimit); } else { - entries = await ctx.db.query('skills').order('desc').take(takeLimit) + entries = await ctx.db.query("skills").order("desc").take(takeLimit); } - const filtered = entries.filter((skill) => !skill.softDeletedAt) - const withBadges = await attachBadgesToSkills(ctx, filtered) + const filtered = entries.filter((skill) => !skill.softDeletedAt); + 
const withBadges = await attachBadgesToSkills(ctx, filtered); const ordered = - args.batch === 'highlighted' + args.batch === "highlighted" ? [...withBadges].sort( (a, b) => (b.badges?.highlighted?.at ?? 0) - (a.badges?.highlighted?.at ?? 0), ) - : withBadges - const limited = ordered.slice(0, limit) + : withBadges; + const limited = ordered.slice(0, limit); const items = await Promise.all( limited.map(async (skill) => ({ skill: toPublicSkill(skill), latestVersion: skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null, })), - ) + ); return items.filter( ( item, ): item is { - skill: NonNullable> - latestVersion: Doc<'skillVersions'> | null + skill: NonNullable>; + latestVersion: Doc<"skillVersions"> | null; } => Boolean(item.skill), - ) + ); }, -}) +}); export const listHighlightedPublic = query({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 12, 1, MAX_PUBLIC_LIST_LIMIT) - const skills = await loadHighlightedSkills(ctx, limit) - return buildPublicSkillEntries(ctx, skills) + const limit = clampInt(args.limit ?? 12, 1, MAX_PUBLIC_LIST_LIMIT); + const skills = await loadHighlightedSkills(ctx, limit); + return buildPublicSkillEntries(ctx, skills); }, -}) +}); export const listForManagement = query({ args: { @@ -1238,248 +1256,248 @@ export const listForManagement = query({ includeDeleted: v.optional(v.boolean()), }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const limit = clampInt(args.limit ?? 50, 1, MAX_LIST_BULK_LIMIT) - const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE) - const entries = await ctx.db.query('skills').order('desc').take(takeLimit) + const { user } = await requireUser(ctx); + assertModerator(user); + const limit = clampInt(args.limit ?? 
50, 1, MAX_LIST_BULK_LIMIT); + const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE); + const entries = await ctx.db.query("skills").order("desc").take(takeLimit); const filtered = ( args.includeDeleted ? entries : entries.filter((skill) => !skill.softDeletedAt) - ).slice(0, limit) - return buildManagementSkillEntries(ctx, filtered) + ).slice(0, limit); + return buildManagementSkillEntries(ctx, filtered); }, -}) +}); export const listRecentVersions = query({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_BULK_LIMIT) + const { user } = await requireUser(ctx); + assertModerator(user); + const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_BULK_LIMIT); const versions = await ctx.db - .query('skillVersions') - .order('desc') - .take(limit * 2) - const entries = versions.filter((version) => !version.softDeletedAt).slice(0, limit) + .query("skillVersions") + .order("desc") + .take(limit * 2); + const entries = versions.filter((version) => !version.softDeletedAt).slice(0, limit); const results: Array<{ - version: Doc<'skillVersions'> - skill: Doc<'skills'> | null - owner: Doc<'users'> | null - }> = [] + version: Doc<"skillVersions">; + skill: Doc<"skills"> | null; + owner: Doc<"users"> | null; + }> = []; for (const version of entries) { - const skill = await ctx.db.get(version.skillId) + const skill = await ctx.db.get(version.skillId); if (!skill) { - results.push({ version, skill: null, owner: null }) - continue + results.push({ version, skill: null, owner: null }); + continue; } - const owner = await ctx.db.get(skill.ownerUserId) - results.push({ version, skill, owner }) + const owner = await ctx.db.get(skill.ownerUserId); + results.push({ version, skill, owner }); } - return results + return results; }, -}) +}); export const listReportedSkills = query({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { 
- const { user } = await requireUser(ctx) - assertModerator(user) - const limit = clampInt(args.limit ?? 25, 1, MAX_LIST_BULK_LIMIT) - const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE) - const entries = await ctx.db.query('skills').order('desc').take(takeLimit) + const { user } = await requireUser(ctx); + assertModerator(user); + const limit = clampInt(args.limit ?? 25, 1, MAX_LIST_BULK_LIMIT); + const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE); + const entries = await ctx.db.query("skills").order("desc").take(takeLimit); const reported = entries .filter((skill) => (skill.reportCount ?? 0) > 0) .sort((a, b) => (b.lastReportedAt ?? 0) - (a.lastReportedAt ?? 0)) - .slice(0, limit) - const managementEntries = await buildManagementSkillEntries(ctx, reported) - const reporterCache = new Map, Promise | null>>() - - const getReporter = (reporterId: Id<'users'>) => { - const cached = reporterCache.get(reporterId) - if (cached) return cached - const reporterPromise = ctx.db.get(reporterId) - reporterCache.set(reporterId, reporterPromise) - return reporterPromise - } + .slice(0, limit); + const managementEntries = await buildManagementSkillEntries(ctx, reported); + const reporterCache = new Map, Promise | null>>(); + + const getReporter = (reporterId: Id<"users">) => { + const cached = reporterCache.get(reporterId); + if (cached) return cached; + const reporterPromise = ctx.db.get(reporterId); + reporterCache.set(reporterId, reporterPromise); + return reporterPromise; + }; return Promise.all( managementEntries.map(async (entry) => { const reports = await ctx.db - .query('skillReports') - .withIndex('by_skill_createdAt', (q) => q.eq('skillId', entry.skill._id)) - .order('desc') - .take(MAX_REPORT_REASON_SAMPLE) + .query("skillReports") + .withIndex("by_skill_createdAt", (q) => q.eq("skillId", entry.skill._id)) + .order("desc") + .take(MAX_REPORT_REASON_SAMPLE); const reportEntries = await Promise.all( reports.map(async (report) => { - const reporter = await 
getReporter(report.userId) - const reason = report.reason?.trim() + const reporter = await getReporter(report.userId); + const reason = report.reason?.trim(); return { - reason: reason && reason.length > 0 ? reason : 'No reason provided.', + reason: reason && reason.length > 0 ? reason : "No reason provided.", createdAt: report.createdAt, reporterHandle: reporter?.handle ?? reporter?.name ?? null, reporterId: report.userId, - } + }; }), - ) - return { ...entry, reports: reportEntries } + ); + return { ...entry, reports: reportEntries }; }), - ) + ); }, -}) +}); export const listDuplicateCandidates = query({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_BULK_LIMIT) - const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE) - const skills = await ctx.db.query('skills').order('desc').take(takeLimit) - const entries = skills.filter((skill) => !skill.softDeletedAt).slice(0, limit) + const { user } = await requireUser(ctx); + assertModerator(user); + const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_BULK_LIMIT); + const takeLimit = Math.min(limit * 5, MAX_LIST_TAKE); + const skills = await ctx.db.query("skills").order("desc").take(takeLimit); + const entries = skills.filter((skill) => !skill.softDeletedAt).slice(0, limit); const results: Array<{ - skill: Doc<'skills'> - latestVersion: Doc<'skillVersions'> | null - fingerprint: string | null - matches: Array<{ skill: Doc<'skills'>; owner: Doc<'users'> | null }> - owner: Doc<'users'> | null - }> = [] + skill: Doc<"skills">; + latestVersion: Doc<"skillVersions"> | null; + fingerprint: string | null; + matches: Array<{ skill: Doc<"skills">; owner: Doc<"users"> | null }>; + owner: Doc<"users"> | null; + }> = []; for (const skill of entries) { const latestVersion = isSkillVersionId(skill.latestVersionId) ? 
await ctx.db.get(skill.latestVersionId) - : null - const fingerprint = latestVersion?.fingerprint ?? null - if (!fingerprint) continue + : null; + const fingerprint = latestVersion?.fingerprint ?? null; + if (!fingerprint) continue; - let matchedFingerprints: Doc<'skillVersionFingerprints'>[] = [] + let matchedFingerprints: Doc<"skillVersionFingerprints">[] = []; try { matchedFingerprints = await ctx.db - .query('skillVersionFingerprints') - .withIndex('by_fingerprint', (q) => q.eq('fingerprint', fingerprint)) - .take(10) + .query("skillVersionFingerprints") + .withIndex("by_fingerprint", (q) => q.eq("fingerprint", fingerprint)) + .take(10); } catch (error) { - console.error('listDuplicateCandidates: fingerprint lookup failed', error) - continue + console.error("listDuplicateCandidates: fingerprint lookup failed", error); + continue; } - const matchEntries: Array<{ skill: Doc<'skills'>; owner: Doc<'users'> | null }> = [] + const matchEntries: Array<{ skill: Doc<"skills">; owner: Doc<"users"> | null }> = []; for (const match of matchedFingerprints) { - if (match.skillId === skill._id) continue - const matchSkill = await ctx.db.get(match.skillId) - if (!matchSkill || matchSkill.softDeletedAt) continue - const matchOwner = await ctx.db.get(matchSkill.ownerUserId) - matchEntries.push({ skill: matchSkill, owner: matchOwner }) + if (match.skillId === skill._id) continue; + const matchSkill = await ctx.db.get(match.skillId); + if (!matchSkill || matchSkill.softDeletedAt) continue; + const matchOwner = await ctx.db.get(matchSkill.ownerUserId); + matchEntries.push({ skill: matchSkill, owner: matchOwner }); } - if (matchEntries.length === 0) continue + if (matchEntries.length === 0) continue; - const owner = isUserId(skill.ownerUserId) ? await ctx.db.get(skill.ownerUserId) : null + const owner = isUserId(skill.ownerUserId) ? 
await ctx.db.get(skill.ownerUserId) : null; results.push({ skill, latestVersion, fingerprint, matches: matchEntries, owner, - }) + }); } - return results + return results; }, -}) +}); -async function countActiveReportsForUser(ctx: MutationCtx, userId: Id<'users'>) { +async function countActiveReportsForUser(ctx: MutationCtx, userId: Id<"users">) { const reports = await ctx.db - .query('skillReports') - .withIndex('by_user', (q) => q.eq('userId', userId)) - .collect() + .query("skillReports") + .withIndex("by_user", (q) => q.eq("userId", userId)) + .collect(); - let count = 0 + let count = 0; for (const report of reports) { - const skill = await ctx.db.get(report.skillId) - if (!skill) continue - if (skill.softDeletedAt) continue - if (skill.moderationStatus === 'removed') continue - const owner = await ctx.db.get(skill.ownerUserId) - if (!owner || owner.deletedAt || owner.deactivatedAt) continue - count += 1 - if (count >= MAX_ACTIVE_REPORTS_PER_USER) break + const skill = await ctx.db.get(report.skillId); + if (!skill) continue; + if (skill.softDeletedAt) continue; + if (skill.moderationStatus === "removed") continue; + const owner = await ctx.db.get(skill.ownerUserId); + if (!owner || owner.deletedAt || owner.deactivatedAt) continue; + count += 1; + if (count >= MAX_ACTIVE_REPORTS_PER_USER) break; } - return count + return count; } export const report = mutation({ - args: { skillId: v.id('skills'), reason: v.string() }, + args: { skillId: v.id("skills"), reason: v.string() }, handler: async (ctx, args) => { - const { userId } = await requireUser(ctx) - const skill = await ctx.db.get(args.skillId) - if (!skill || skill.softDeletedAt || skill.moderationStatus === 'removed') { - throw new Error('Skill not found') + const { userId } = await requireUser(ctx); + const skill = await ctx.db.get(args.skillId); + if (!skill || skill.softDeletedAt || skill.moderationStatus === "removed") { + throw new Error("Skill not found"); } - const reason = args.reason.trim() + const 
reason = args.reason.trim(); if (!reason) { - throw new Error('Report reason required.') + throw new Error("Report reason required."); } const existing = await ctx.db - .query('skillReports') - .withIndex('by_skill_user', (q) => q.eq('skillId', args.skillId).eq('userId', userId)) - .unique() - if (existing) return { ok: true as const, reported: false, alreadyReported: true } + .query("skillReports") + .withIndex("by_skill_user", (q) => q.eq("skillId", args.skillId).eq("userId", userId)) + .unique(); + if (existing) return { ok: true as const, reported: false, alreadyReported: true }; - const activeReports = await countActiveReportsForUser(ctx, userId) + const activeReports = await countActiveReportsForUser(ctx, userId); if (activeReports >= MAX_ACTIVE_REPORTS_PER_USER) { - throw new Error('Report limit reached. Please wait for moderation before reporting more.') + throw new Error("Report limit reached. Please wait for moderation before reporting more."); } - const now = Date.now() - await ctx.db.insert('skillReports', { + const now = Date.now(); + await ctx.db.insert("skillReports", { skillId: args.skillId, userId, reason: reason.slice(0, 500), createdAt: now, - }) + }); - const nextReportCount = (skill.reportCount ?? 0) + 1 - const shouldAutoHide = nextReportCount > AUTO_HIDE_REPORT_THRESHOLD && !skill.softDeletedAt - const updates: Partial> = { + const nextReportCount = (skill.reportCount ?? 
0) + 1; + const shouldAutoHide = nextReportCount > AUTO_HIDE_REPORT_THRESHOLD && !skill.softDeletedAt; + const updates: Partial> = { reportCount: nextReportCount, lastReportedAt: now, updatedAt: now, - } + }; if (shouldAutoHide) { Object.assign(updates, { softDeletedAt: now, - moderationStatus: 'hidden', - moderationReason: 'auto.reports', - moderationNotes: 'Auto-hidden after 4 unique reports.', + moderationStatus: "hidden", + moderationReason: "auto.reports", + moderationNotes: "Auto-hidden after 4 unique reports.", hiddenAt: now, lastReviewedAt: now, - }) + }); } - await ctx.db.patch(skill._id, updates) + await ctx.db.patch(skill._id, updates); if (shouldAutoHide) { - await setSkillEmbeddingsSoftDeleted(ctx, skill._id, true, now) + await setSkillEmbeddingsSoftDeleted(ctx, skill._id, true, now); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: userId, - action: 'skill.auto_hide', - targetType: 'skill', + action: "skill.auto_hide", + targetType: "skill", targetId: skill._id, metadata: { reportCount: nextReportCount }, createdAt: now, - }) + }); } - return { ok: true as const, reported: true, alreadyReported: false } + return { ok: true as const, reported: true, alreadyReported: false }; }, -}) +}); // TODO: Delete listPublicPage once all clients have migrated to listPublicPageV2 export const listPublicPage = query({ @@ -1488,59 +1506,59 @@ export const listPublicPage = query({ limit: v.optional(v.number()), sort: v.optional( v.union( - v.literal('updated'), - v.literal('downloads'), - v.literal('stars'), - v.literal('installsCurrent'), - v.literal('installsAllTime'), - v.literal('trending'), + v.literal("updated"), + v.literal("downloads"), + v.literal("stars"), + v.literal("installsCurrent"), + v.literal("installsAllTime"), + v.literal("trending"), ), ), }, handler: async (ctx, args) => { - const sort = args.sort ?? 'updated' - const limit = clampInt(args.limit ?? 24, 1, MAX_PUBLIC_LIST_LIMIT) + const sort = args.sort ?? 
"updated"; + const limit = clampInt(args.limit ?? 24, 1, MAX_PUBLIC_LIST_LIMIT); - if (sort === 'updated') { + if (sort === "updated") { const { page, isDone, continueCursor } = await ctx.db - .query('skills') - .withIndex('by_updated', (q) => q) - .order('desc') - .paginate({ cursor: args.cursor ?? null, numItems: limit }) + .query("skills") + .withIndex("by_updated", (q) => q) + .order("desc") + .paginate({ cursor: args.cursor ?? null, numItems: limit }); - const skills = page.filter((skill) => !skill.softDeletedAt) - const items = await buildPublicSkillEntries(ctx, skills) + const skills = page.filter((skill) => !skill.softDeletedAt); + const items = await buildPublicSkillEntries(ctx, skills); - return { items, nextCursor: isDone ? null : continueCursor } + return { items, nextCursor: isDone ? null : continueCursor }; } - if (sort === 'trending') { - const entries = await getTrendingEntries(ctx, limit) - const skills: Doc<'skills'>[] = [] + if (sort === "trending") { + const entries = await getTrendingEntries(ctx, limit); + const skills: Doc<"skills">[] = []; for (const entry of entries) { - const skill = await ctx.db.get(entry.skillId) - if (!skill || skill.softDeletedAt) continue - skills.push(skill) - if (skills.length >= limit) break + const skill = await ctx.db.get(entry.skillId); + if (!skill || skill.softDeletedAt) continue; + skills.push(skill); + if (skills.length >= limit) break; } - const items = await buildPublicSkillEntries(ctx, skills) - return { items, nextCursor: null } + const items = await buildPublicSkillEntries(ctx, skills); + return { items, nextCursor: null }; } - const index = sortToIndex(sort) + const index = sortToIndex(sort); const { page, isDone, continueCursor } = await ctx.db - .query('skills') + .query("skills") .withIndex(index, (q) => q) - .order('desc') - .paginate({ cursor: args.cursor ?? null, numItems: limit }) + .order("desc") + .paginate({ cursor: args.cursor ?? 
null, numItems: limit }); - const filtered = page.filter((skill) => !skill.softDeletedAt) - const items = await buildPublicSkillEntries(ctx, filtered) - return { items, nextCursor: isDone ? null : continueCursor } + const filtered = page.filter((skill) => !skill.softDeletedAt); + const items = await buildPublicSkillEntries(ctx, filtered); + return { items, nextCursor: isDone ? null : continueCursor }; }, -}) +}); /** * V2 of listPublicPage using standard Convex pagination (paginate + usePaginatedQuery). @@ -1554,59 +1572,59 @@ export const listPublicPageV2 = query({ paginationOpts: paginationOptsValidator, sort: v.optional( v.union( - v.literal('newest'), - v.literal('updated'), - v.literal('downloads'), - v.literal('installs'), - v.literal('stars'), - v.literal('name'), + v.literal("newest"), + v.literal("updated"), + v.literal("downloads"), + v.literal("installs"), + v.literal("stars"), + v.literal("name"), ), ), - dir: v.optional(v.union(v.literal('asc'), v.literal('desc'))), + dir: v.optional(v.union(v.literal("asc"), v.literal("desc"))), nonSuspiciousOnly: v.optional(v.boolean()), }, handler: async (ctx, args) => { - const sort = args.sort ?? 'newest' - const dir = args.dir ?? (sort === 'name' ? 'asc' : 'desc') + const sort = args.sort ?? "newest"; + const dir = args.dir ?? (sort === "name" ? "asc" : "desc"); const paginationOpts: { cursor: string | null; numItems: number; id?: number } = { ...args.paginationOpts, numItems: clampInt(args.paginationOpts.numItems, 1, MAX_PUBLIC_LIST_LIMIT), - } + }; // Use the index to filter out soft-deleted skills at query time. // softDeletedAt === undefined means active (non-deleted) skills only. const result = await ctx.db - .query('skills') - .withIndex(SORT_INDEXES[sort], (q) => q.eq('softDeletedAt', undefined)) + .query("skills") + .withIndex(SORT_INDEXES[sort], (q) => q.eq("softDeletedAt", undefined)) .order(dir) - .paginate(paginationOpts) + .paginate(paginationOpts); const filteredPage = args.nonSuspiciousOnly ? 
result.page.filter((skill) => !isSkillSuspicious(skill)) - : result.page + : result.page; // Build the public skill entries (fetch latestVersion + ownerHandle) - const items = await buildPublicSkillEntries(ctx, filteredPage) - return { ...result, page: items } + const items = await buildPublicSkillEntries(ctx, filteredPage); + return { ...result, page: items }; }, -}) +}); function sortToIndex( - sort: 'downloads' | 'stars' | 'installsCurrent' | 'installsAllTime', + sort: "downloads" | "stars" | "installsCurrent" | "installsAllTime", ): - | 'by_stats_downloads' - | 'by_stats_stars' - | 'by_stats_installs_current' - | 'by_stats_installs_all_time' { + | "by_stats_downloads" + | "by_stats_stars" + | "by_stats_installs_current" + | "by_stats_installs_all_time" { switch (sort) { - case 'downloads': - return 'by_stats_downloads' - case 'stars': - return 'by_stats_stars' - case 'installsCurrent': - return 'by_stats_installs_current' - case 'installsAllTime': - return 'by_stats_installs_all_time' + case "downloads": + return "by_stats_downloads"; + case "stars": + return "by_stats_stars"; + case "installsCurrent": + return "by_stats_installs_current"; + case "installsAllTime": + return "by_stats_installs_all_time"; } } @@ -1614,135 +1632,135 @@ async function getTrendingEntries(ctx: QueryCtx, limit: number) { // Use the pre-computed leaderboard from the hourly cron job. // Avoid Date.now() here to keep the query deterministic and cacheable. 
const latest = await ctx.db - .query('skillLeaderboards') - .withIndex('by_kind', (q) => q.eq('kind', 'trending')) - .order('desc') - .take(1) + .query("skillLeaderboards") + .withIndex("by_kind", (q) => q.eq("kind", "trending")) + .order("desc") + .take(1); if (latest[0]) { - return latest[0].items.slice(0, limit) + return latest[0].items.slice(0, limit); } // No leaderboard exists yet (cold start) - compute on the fly - const fallback = await buildTrendingLeaderboard(ctx, { limit, now: Date.now() }) - return fallback.items + const fallback = await buildTrendingLeaderboard(ctx, { limit, now: Date.now() }); + return fallback.items; } export const listVersions = query({ - args: { skillId: v.id('skills'), limit: v.optional(v.number()) }, + args: { skillId: v.id("skills"), limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 20 + const limit = args.limit ?? 20; return ctx.db - .query('skillVersions') - .withIndex('by_skill', (q) => q.eq('skillId', args.skillId)) - .order('desc') - .take(limit) + .query("skillVersions") + .withIndex("by_skill", (q) => q.eq("skillId", args.skillId)) + .order("desc") + .take(limit); }, -}) +}); export const listVersionsPage = query({ args: { - skillId: v.id('skills'), + skillId: v.id("skills"), cursor: v.optional(v.string()), limit: v.optional(v.number()), }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_LIMIT) + const limit = clampInt(args.limit ?? 20, 1, MAX_LIST_LIMIT); const { page, isDone, continueCursor } = await ctx.db - .query('skillVersions') - .withIndex('by_skill', (q) => q.eq('skillId', args.skillId)) - .order('desc') - .paginate({ cursor: args.cursor ?? null, numItems: limit }) - const items = page.filter((version) => !version.softDeletedAt) - return { items, nextCursor: isDone ? null : continueCursor } + .query("skillVersions") + .withIndex("by_skill", (q) => q.eq("skillId", args.skillId)) + .order("desc") + .paginate({ cursor: args.cursor ?? 
null, numItems: limit }); + const items = page.filter((version) => !version.softDeletedAt); + return { items, nextCursor: isDone ? null : continueCursor }; }, -}) +}); export const getVersionById = query({ - args: { versionId: v.id('skillVersions') }, + args: { versionId: v.id("skillVersions") }, handler: async (ctx, args) => ctx.db.get(args.versionId), -}) +}); export const getVersionsByIdsInternal = internalQuery({ - args: { versionIds: v.array(v.id('skillVersions')) }, + args: { versionIds: v.array(v.id("skillVersions")) }, handler: async (ctx, args) => { - const versions = await Promise.all(args.versionIds.map((id) => ctx.db.get(id))) - return versions.filter((v): v is NonNullable => v !== null) + const versions = await Promise.all(args.versionIds.map((id) => ctx.db.get(id))); + return versions.filter((v): v is NonNullable => v !== null); }, -}) +}); export const getVersionByIdInternal = internalQuery({ - args: { versionId: v.id('skillVersions') }, + args: { versionId: v.id("skillVersions") }, handler: async (ctx, args) => ctx.db.get(args.versionId), -}) +}); export const getSkillByIdInternal = internalQuery({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => ctx.db.get(args.skillId), -}) +}); export const getPendingScanSkillsInternal = internalQuery({ args: { limit: v.optional(v.number()), skipRecentMinutes: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = clampInt(args.limit ?? 10, 1, 100) - const skipRecentMinutes = args.skipRecentMinutes ?? 60 - const skipThreshold = Date.now() - skipRecentMinutes * 60 * 1000 + const limit = clampInt(args.limit ?? 10, 1, 100); + const skipRecentMinutes = args.skipRecentMinutes ?? 60; + const skipThreshold = Date.now() - skipRecentMinutes * 60 * 1000; // Use an indexed query and bounded scan to avoid full-table reads under spam/high volume. 
- const poolSize = Math.min(Math.max(limit * 20, 200), 1000) + const poolSize = Math.min(Math.max(limit * 20, 200), 1000); const allSkills = await ctx.db - .query('skills') - .withIndex('by_active_updated', (q) => q.eq('softDeletedAt', undefined)) - .order('desc') - .take(poolSize) + .query("skills") + .withIndex("by_active_updated", (q) => q.eq("softDeletedAt", undefined)) + .order("desc") + .take(poolSize); const candidates = allSkills.filter((skill) => { - const reason = skill.moderationReason - if (skill.moderationStatus === 'hidden' && reason === 'pending.scan') return true - if (skill.moderationStatus === 'hidden' && reason === 'quality.low') return true - if (skill.moderationStatus === 'active' && reason === 'pending.scan') return true - if (skill.moderationStatus === 'active' && reason === 'scanner.vt.pending') return true + const reason = skill.moderationReason; + if (skill.moderationStatus === "hidden" && reason === "pending.scan") return true; + if (skill.moderationStatus === "hidden" && reason === "quality.low") return true; + if (skill.moderationStatus === "active" && reason === "pending.scan") return true; + if (skill.moderationStatus === "active" && reason === "scanner.vt.pending") return true; return ( - reason === 'scanner.llm.clean' || - reason === 'scanner.llm.suspicious' || - reason === 'scanner.llm.malicious' - ) - }) + reason === "scanner.llm.clean" || + reason === "scanner.llm.suspicious" || + reason === "scanner.llm.malicious" + ); + }); // Filter out recently checked skills const skills = candidates.filter( (s) => !s.scanLastCheckedAt || s.scanLastCheckedAt < skipThreshold, - ) + ); // Shuffle and take the requested limit (Fisher-Yates) for (let i = skills.length - 1; i > 0; i--) { - const j = Math.floor(Math.random() * (i + 1)) - ;[skills[i], skills[j]] = [skills[j], skills[i]] + const j = Math.floor(Math.random() * (i + 1)); + [skills[i], skills[j]] = [skills[j], skills[i]]; } - const selected = skills.slice(0, limit) + const selected = 
skills.slice(0, limit); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> | null - sha256hash: string | null - checkCount: number - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions"> | null; + sha256hash: string | null; + checkCount: number; + }> = []; for (const skill of selected) { - const version = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null + const version = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null; // Skip skills where version already has vtAnalysis or lacks sha256hash - if (version?.vtAnalysis || !version?.sha256hash) continue + if (version?.vtAnalysis || !version?.sha256hash) continue; results.push({ skillId: skill._id, versionId: version?._id ?? null, sha256hash: version?.sha256hash ?? null, checkCount: skill.scanCheckCount ?? 0, - }) + }); } - return results + return results; }, -}) +}); /** * Health check query to monitor scan queue status @@ -1751,28 +1769,28 @@ export const getScanQueueHealthInternal = internalQuery({ args: {}, handler: async (ctx) => { const pending = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), 'hidden'), - q.eq(q.field('moderationReason'), 'pending.scan'), + q.eq(q.field("moderationStatus"), "hidden"), + q.eq(q.field("moderationReason"), "pending.scan"), ), ) - .collect() + .collect(); - const now = Date.now() - const oneHourAgo = now - 60 * 60 * 1000 - const oneDayAgo = now - 24 * 60 * 60 * 1000 + const now = Date.now(); + const oneHourAgo = now - 60 * 60 * 1000; + const oneDayAgo = now - 24 * 60 * 60 * 1000; - let staleCount = 0 - let veryStaleCount = 0 - let oldestTimestamp = now + let staleCount = 0; + let veryStaleCount = 0; + let oldestTimestamp = now; for (const skill of pending) { - const createdAt = skill.createdAt ?? 
skill._creationTime - if (createdAt < oldestTimestamp) oldestTimestamp = createdAt - if (createdAt < oneHourAgo) staleCount++ - if (createdAt < oneDayAgo) veryStaleCount++ + const createdAt = skill.createdAt ?? skill._creationTime; + if (createdAt < oldestTimestamp) oldestTimestamp = createdAt; + if (createdAt < oneHourAgo) staleCount++; + if (createdAt < oneDayAgo) veryStaleCount++; } return { @@ -1781,9 +1799,9 @@ export const getScanQueueHealthInternal = internalQuery({ veryStaleCount, // pending > 24 hours oldestAgeMinutes: Math.round((now - oldestTimestamp) / 60000), healthy: pending.length < 50 && veryStaleCount === 0, - } + }; }, -}) +}); /** * Get active skills that have a version hash but no vtAnalysis cached. @@ -1792,52 +1810,52 @@ export const getScanQueueHealthInternal = internalQuery({ export const getActiveSkillsMissingVTCacheInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 100 - const poolSize = limit * 2 // Take more to account for some having vtAnalysis + const limit = args.limit ?? 
100; + const poolSize = limit * 2; // Take more to account for some having vtAnalysis // Skills waiting for VT + LLM-evaluated skills that still need VT cache const vtPending = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), 'active'), - q.eq(q.field('moderationReason'), 'scanner.vt.pending'), + q.eq(q.field("moderationStatus"), "active"), + q.eq(q.field("moderationReason"), "scanner.vt.pending"), ), ) - .take(poolSize) + .take(poolSize); const llmEvaluated = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.or( - q.eq(q.field('moderationReason'), 'scanner.llm.clean'), - q.eq(q.field('moderationReason'), 'scanner.llm.suspicious'), - q.eq(q.field('moderationReason'), 'scanner.llm.malicious'), + q.eq(q.field("moderationReason"), "scanner.llm.clean"), + q.eq(q.field("moderationReason"), "scanner.llm.suspicious"), + q.eq(q.field("moderationReason"), "scanner.llm.malicious"), ), ) - .take(poolSize) + .take(poolSize); // Dedup across pools - const seen = new Set() - const allSkills: typeof vtPending = [] + const seen = new Set(); + const allSkills: typeof vtPending = []; for (const skill of [...vtPending, ...llmEvaluated]) { if (!seen.has(skill._id)) { - seen.add(skill._id) - allSkills.push(skill) + seen.add(skill._id); + allSkills.push(skill); } } const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - sha256hash: string - slug: string - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + sha256hash: string; + slug: string; + }> = []; for (const skill of allSkills) { - if (results.length >= limit) break - if (!skill.latestVersionId) continue - const version = await ctx.db.get(skill.latestVersionId) - if (!version) continue + if (results.length >= limit) break; + if (!skill.latestVersionId) continue; + const version = await ctx.db.get(skill.latestVersionId); + if (!version) continue; // Include if version has hash but no vtAnalysis if 
(version.sha256hash && !version.vtAnalysis) { results.push({ @@ -1845,13 +1863,13 @@ export const getActiveSkillsMissingVTCacheInternal = internalQuery({ versionId: version._id, sha256hash: version.sha256hash, slug: skill.slug, - }) + }); } } - return results + return results; }, -}) +}); /** * Get all active skills with VT analysis for daily re-scan. @@ -1860,33 +1878,33 @@ export const getAllActiveSkillsForRescanInternal = internalQuery({ args: {}, handler: async (ctx) => { const activeSkills = await ctx.db - .query('skills') - .filter((q) => q.eq(q.field('moderationStatus'), 'active')) - .collect() + .query("skills") + .filter((q) => q.eq(q.field("moderationStatus"), "active")) + .collect(); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - sha256hash: string - slug: string - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + sha256hash: string; + slug: string; + }> = []; for (const skill of activeSkills) { - if (!skill.latestVersionId) continue - const version = await ctx.db.get(skill.latestVersionId) - if (!version?.sha256hash) continue + if (!skill.latestVersionId) continue; + const version = await ctx.db.get(skill.latestVersionId); + if (!version?.sha256hash) continue; results.push({ skillId: skill._id, versionId: version._id, sha256hash: version.sha256hash, slug: skill.slug, - }) + }); } - return results + return results; }, -}) +}); /** * Cursor-based batch query for daily rescan. Uses _creationTime for stable pagination. @@ -1898,50 +1916,50 @@ export const getActiveSkillBatchForRescanInternal = internalQuery({ batchSize: v.optional(v.number()), }, handler: async (ctx, args) => { - const batchSize = args.batchSize ?? 100 - const cursor = args.cursor ?? 0 + const batchSize = args.batchSize ?? 100; + const cursor = args.cursor ?? 
0; // Query skills created after the cursor, ordered by _creationTime (ascending for stable pagination) const candidates = await ctx.db - .query('skills') - .filter((q) => q.gt(q.field('_creationTime'), cursor)) - .order('asc') - .take(batchSize * 3) // Over-fetch to account for filtering + .query("skills") + .filter((q) => q.gt(q.field("_creationTime"), cursor)) + .order("asc") + .take(batchSize * 3); // Over-fetch to account for filtering const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - sha256hash: string - slug: string - }> = [] - let nextCursor = cursor + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + sha256hash: string; + slug: string; + }> = []; + let nextCursor = cursor; for (const skill of candidates) { - nextCursor = skill._creationTime - if (results.length >= batchSize) break + nextCursor = skill._creationTime; + if (results.length >= batchSize) break; // Filter out soft-deleted and non-active - if (skill.softDeletedAt) continue - if ((skill.moderationStatus ?? 'active') !== 'active') continue - if (!skill.latestVersionId) continue + if (skill.softDeletedAt) continue; + if ((skill.moderationStatus ?? "active") !== "active") continue; + if (!skill.latestVersionId) continue; - const version = await ctx.db.get(skill.latestVersionId) - if (!version?.sha256hash) continue + const version = await ctx.db.get(skill.latestVersionId); + if (!version?.sha256hash) continue; results.push({ skillId: skill._id, versionId: version._id, sha256hash: version.sha256hash, slug: skill.slug, - }) + }); } // Done when we got fewer candidates than our over-fetch limit - const done = candidates.length < batchSize * 3 + const done = candidates.length < batchSize * 3; - return { skills: results, nextCursor, done } + return { skills: results, nextCursor, done }; }, -}) +}); /** * Get active skills whose latest version has no llmAnalysis. 
@@ -1953,32 +1971,32 @@ export const getActiveSkillBatchForLlmBackfillInternal = internalQuery({ batchSize: v.optional(v.number()), }, handler: async (ctx, args) => { - const batchSize = args.batchSize ?? 10 - const cursor = args.cursor ?? 0 + const batchSize = args.batchSize ?? 10; + const cursor = args.cursor ?? 0; const candidates = await ctx.db - .query('skills') - .filter((q) => q.gt(q.field('_creationTime'), cursor)) - .order('asc') - .take(batchSize * 3) + .query("skills") + .filter((q) => q.gt(q.field("_creationTime"), cursor)) + .order("asc") + .take(batchSize * 3); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - }> = [] - let nextCursor = cursor + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + }> = []; + let nextCursor = cursor; for (const skill of candidates) { - nextCursor = skill._creationTime - if (results.length >= batchSize) break + nextCursor = skill._creationTime; + if (results.length >= batchSize) break; - if (skill.softDeletedAt) continue - if ((skill.moderationStatus ?? 'active') !== 'active') continue - if (!skill.latestVersionId) continue + if (skill.softDeletedAt) continue; + if ((skill.moderationStatus ?? 
"active") !== "active") continue; + if (!skill.latestVersionId) continue; - const version = await ctx.db.get(skill.latestVersionId) - if (!version) continue + const version = await ctx.db.get(skill.latestVersionId); + if (!version) continue; // Re-evaluate all skills (full file content reading upgrade) // if (version.llmAnalysis && version.llmAnalysis.status !== 'error') continue @@ -1986,14 +2004,14 @@ export const getActiveSkillBatchForLlmBackfillInternal = internalQuery({ skillId: skill._id, versionId: version._id, slug: skill.slug, - }) + }); } - const done = candidates.length < batchSize * 3 + const done = candidates.length < batchSize * 3; - return { skills: results, nextCursor, done } + return { skills: results, nextCursor, done }; }, -}) +}); /** * Get skills with stale moderationReason that have vtAnalysis cached. @@ -2002,29 +2020,29 @@ export const getActiveSkillBatchForLlmBackfillInternal = internalQuery({ export const getSkillsWithStaleModerationReasonInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 100 + const limit = args.limit ?? 
100; // Find skills with pending-like moderationReason - const staleReasons = new Set(['scanner.vt.pending', 'pending.scan']) + const staleReasons = new Set(["scanner.vt.pending", "pending.scan"]); const allSkills = await ctx.db - .query('skills') - .filter((q) => q.eq(q.field('moderationStatus'), 'active')) - .collect() + .query("skills") + .filter((q) => q.eq(q.field("moderationStatus"), "active")) + .collect(); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - currentReason: string - vtStatus: string | null - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + currentReason: string; + vtStatus: string | null; + }> = []; for (const skill of allSkills) { - if (!skill.moderationReason || !staleReasons.has(skill.moderationReason)) continue - if (!skill.latestVersionId) continue + if (!skill.moderationReason || !staleReasons.has(skill.moderationReason)) continue; + if (!skill.latestVersionId) continue; - const version = await ctx.db.get(skill.latestVersionId) - if (!version?.vtAnalysis?.status) continue // Skip if no vtAnalysis + const version = await ctx.db.get(skill.latestVersionId); + if (!version?.vtAnalysis?.status) continue; // Skip if no vtAnalysis results.push({ skillId: skill._id, @@ -2032,14 +2050,14 @@ export const getSkillsWithStaleModerationReasonInternal = internalQuery({ slug: skill.slug, currentReason: skill.moderationReason, vtStatus: version.vtAnalysis.status, - }) + }); - if (results.length >= limit) break + if (results.length >= limit) break; } - return results + return results; }, -}) +}); /** * Get skills with scanner.vt.pending that need reanalysis. @@ -2048,56 +2066,155 @@ export const getSkillsWithStaleModerationReasonInternal = internalQuery({ export const getPendingVTSkillsInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 100 + const limit = args.limit ?? 
100; const skills = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), 'active'), - q.eq(q.field('moderationReason'), 'scanner.vt.pending'), + q.eq(q.field("moderationStatus"), "active"), + q.eq(q.field("moderationReason"), "scanner.vt.pending"), ), ) - .take(limit) + .take(limit); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - sha256hash: string - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + sha256hash: string; + }> = []; for (const skill of skills) { - if (!skill.latestVersionId) continue - const version = await ctx.db.get(skill.latestVersionId) - if (!version?.sha256hash) continue + if (!skill.latestVersionId) continue; + const version = await ctx.db.get(skill.latestVersionId); + if (!version?.sha256hash) continue; results.push({ skillId: skill._id, versionId: version._id, slug: skill.slug, sha256hash: version.sha256hash, - }) + }); } - return results + return results; }, -}) +}); /** * Update a skill's moderationReason. */ export const updateSkillModerationReasonInternal = internalMutation({ args: { - skillId: v.id('skills'), + skillId: v.id("skills"), moderationReason: v.string(), }, handler: async (ctx, args) => { await ctx.db.patch(args.skillId, { moderationReason: args.moderationReason, - }) + }); }, -}) +}); + +/** + * Cursor-based skill scan for v2 moderation backfill. + * Includes active + hidden skills, excludes hard-removed skills. + */ +export const getSkillBatchForModerationBackfillInternal = internalQuery({ + args: { + cursor: v.optional(v.number()), + batchSize: v.optional(v.number()), + }, + handler: async (ctx, args) => { + const batchSize = args.batchSize ?? 100; + const cursor = args.cursor ?? 
0; + const candidates = await ctx.db + .query("skills") + .filter((q) => q.gt(q.field("_creationTime"), cursor)) + .order("asc") + .take(batchSize * 3); + + const skills: Array<{ + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + createdCursor: number; + }> = []; + let nextCursor = cursor; + for (const skill of candidates) { + nextCursor = skill._creationTime; + if (skills.length >= batchSize) break; + if ((skill.moderationStatus ?? "active") === "removed") continue; + if (!skill.latestVersionId) continue; + skills.push({ + skillId: skill._id, + versionId: skill.latestVersionId, + slug: skill.slug, + createdCursor: skill._creationTime, + }); + } + const done = candidates.length < batchSize * 3; + return { skills, nextCursor, done }; + }, +}); + +export const applyModerationBackfillInternal = internalMutation({ + args: { + skillId: v.id("skills"), + versionId: v.id("skillVersions"), + }, + handler: async (ctx, args) => { + const skill = await ctx.db.get(args.skillId); + const version = await ctx.db.get(args.versionId); + if (!skill || !version) return { ok: false as const, reason: "missing" }; + if ((skill.moderationStatus ?? "active") === "removed") { + return { ok: false as const, reason: "removed" }; + } + + const snapshot = buildModerationSnapshot({ + staticScan: version.staticScan + ? { + status: version.staticScan.status, + reasonCodes: version.staticScan.reasonCodes, + findings: version.staticScan.findings, + summary: version.staticScan.summary, + engineVersion: version.staticScan.engineVersion, + checkedAt: version.staticScan.checkedAt, + } + : undefined, + vtStatus: version.vtAnalysis?.status, + llmStatus: version.llmAnalysis?.status, + existingReasonCodes: (skill.moderationReasonCodes as string[] | undefined) ?? [], + existingEvidence: (skill.moderationEvidence as ModerationFinding[] | undefined) ?? 
[], + sourceVersionId: version._id, + }); + + const now = Date.now(); + const nextStatus = + snapshot.verdict === "malicious" && (skill.moderationStatus ?? "active") === "active" + ? "hidden" + : skill.moderationStatus; + + await ctx.db.patch(skill._id, { + moderationStatus: nextStatus, + moderationFlags: snapshot.legacyFlags, + moderationVerdict: snapshot.verdict, + moderationReasonCodes: snapshot.reasonCodes.length ? snapshot.reasonCodes : undefined, + moderationEvidence: snapshot.evidence.length ? snapshot.evidence : undefined, + moderationSummary: snapshot.summary, + moderationEngineVersion: snapshot.engineVersion, + moderationEvaluatedAt: snapshot.evaluatedAt, + moderationSourceVersionId: version._id, + hiddenAt: + nextStatus === "hidden" && (skill.moderationStatus ?? "active") === "active" + ? now + : skill.hiddenAt, + updatedAt: now, + }); + return { ok: true as const, verdict: snapshot.verdict }; + }, +}); /** * Get skills with null moderationStatus that need to be normalized. @@ -2105,141 +2222,148 @@ export const updateSkillModerationReasonInternal = internalMutation({ export const getSkillsWithNullModerationStatusInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 100 + const limit = args.limit ?? 100; const skills = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), undefined), - q.eq(q.field('softDeletedAt'), undefined), + q.eq(q.field("moderationStatus"), undefined), + q.eq(q.field("softDeletedAt"), undefined), ), ) - .take(limit) + .take(limit); return skills.map((s) => ({ skillId: s._id, slug: s.slug, moderationReason: s.moderationReason, - })) + })); }, -}) +}); /** * Set moderationStatus to 'active' for a skill. 
*/ export const setSkillModerationStatusActiveInternal = internalMutation({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => { await ctx.db.patch(args.skillId, { - moderationStatus: 'active', - }) + moderationStatus: "active", + }); }, -}) +}); -async function listSkillEmbeddingsForSkill(ctx: MutationCtx, skillId: Id<'skills'>) { +async function listSkillEmbeddingsForSkill(ctx: MutationCtx, skillId: Id<"skills">) { return ctx.db - .query('skillEmbeddings') - .withIndex('by_skill', (q) => q.eq('skillId', skillId)) - .collect() + .query("skillEmbeddings") + .withIndex("by_skill", (q) => q.eq("skillId", skillId)) + .collect(); } -async function markSkillEmbeddingsDeleted(ctx: MutationCtx, skillId: Id<'skills'>, now: number) { - const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId) +async function markSkillEmbeddingsDeleted(ctx: MutationCtx, skillId: Id<"skills">, now: number) { + const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId); for (const embedding of embeddings) { - if (embedding.visibility === 'deleted') continue - await ctx.db.patch(embedding._id, { visibility: 'deleted', updatedAt: now }) + if (embedding.visibility === "deleted") continue; + await ctx.db.patch(embedding._id, { visibility: "deleted", updatedAt: now }); } } -async function restoreSkillEmbeddingsVisibility(ctx: MutationCtx, skillId: Id<'skills'>, now: number) { - const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId) +async function restoreSkillEmbeddingsVisibility( + ctx: MutationCtx, + skillId: Id<"skills">, + now: number, +) { + const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId); for (const embedding of embeddings) { - const visibility = embeddingVisibilityFor(embedding.isLatest, embedding.isApproved) - await ctx.db.patch(embedding._id, { visibility, updatedAt: now }) + const visibility = embeddingVisibilityFor(embedding.isLatest, embedding.isApproved); + await 
ctx.db.patch(embedding._id, { visibility, updatedAt: now }); } } async function setSkillEmbeddingsSoftDeleted( ctx: MutationCtx, - skillId: Id<'skills'>, + skillId: Id<"skills">, deleted: boolean, now: number, ) { if (deleted) { - await markSkillEmbeddingsDeleted(ctx, skillId, now) - return + await markSkillEmbeddingsDeleted(ctx, skillId, now); + return; } - await restoreSkillEmbeddingsVisibility(ctx, skillId, now) + await restoreSkillEmbeddingsVisibility(ctx, skillId, now); } async function setSkillEmbeddingsLatestVersion( ctx: MutationCtx, - skillId: Id<'skills'>, - latestVersionId: Id<'skillVersions'>, + skillId: Id<"skills">, + latestVersionId: Id<"skillVersions">, now: number, ) { - const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId) + const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId); for (const embedding of embeddings) { - const isLatest = embedding.versionId === latestVersionId + const isLatest = embedding.versionId === latestVersionId; await ctx.db.patch(embedding._id, { isLatest, visibility: embeddingVisibilityFor(isLatest, embedding.isApproved), updatedAt: now, - }) + }); } } async function setSkillEmbeddingsApproved( ctx: MutationCtx, - skillId: Id<'skills'>, + skillId: Id<"skills">, approved: boolean, now: number, ) { - const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId) + const embeddings = await listSkillEmbeddingsForSkill(ctx, skillId); for (const embedding of embeddings) { await ctx.db.patch(embedding._id, { isApproved: approved, visibility: embeddingVisibilityFor(embedding.isLatest, approved), updatedAt: now, - }) + }); } } export const applyBanToOwnedSkillsBatchInternal = internalMutation({ args: { - ownerUserId: v.id('users'), + ownerUserId: v.id("users"), bannedAt: v.number(), - hiddenBy: v.optional(v.id('users')), + hiddenBy: v.optional(v.id("users")), cursor: v.optional(v.string()), }, handler: async (ctx, args) => { const { page, isDone, continueCursor } = await ctx.db - .query('skills') - 
.withIndex('by_owner', (q) => q.eq('ownerUserId', args.ownerUserId)) - .order('desc') - .paginate({ cursor: args.cursor ?? null, numItems: BAN_USER_SKILLS_BATCH_SIZE }) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", args.ownerUserId)) + .order("desc") + .paginate({ cursor: args.cursor ?? null, numItems: BAN_USER_SKILLS_BATCH_SIZE }); - let hiddenCount = 0 + let hiddenCount = 0; for (const skill of page) { - if (skill.softDeletedAt) continue + if (skill.softDeletedAt) continue; // Only overwrite moderation fields for active skills. Keep existing hidden/removed // moderation reasons intact. - const shouldMarkModeration = (skill.moderationStatus ?? 'active') === 'active' + const shouldMarkModeration = (skill.moderationStatus ?? "active") === "active"; - const patch: Partial> = { softDeletedAt: args.bannedAt, updatedAt: args.bannedAt } + const patch: Partial> = { + softDeletedAt: args.bannedAt, + updatedAt: args.bannedAt, + }; if (shouldMarkModeration) { - patch.moderationStatus = 'hidden' - patch.moderationReason = 'user.banned' - patch.hiddenAt = args.bannedAt - patch.hiddenBy = args.hiddenBy - patch.lastReviewedAt = args.bannedAt - hiddenCount += 1 + patch.moderationStatus = "hidden"; + patch.moderationReason = "user.banned"; + patch.hiddenAt = args.bannedAt; + patch.hiddenBy = args.hiddenBy; + patch.lastReviewedAt = args.bannedAt; + hiddenCount += 1; } - await ctx.db.patch(skill._id, patch) - await setSkillEmbeddingsSoftDeleted(ctx, skill._id, true, args.bannedAt) + await ctx.db.patch(skill._id, patch); + await setSkillEmbeddingsSoftDeleted(ctx, skill._id, true, args.bannedAt); } scheduleNextBatchIfNeeded( @@ -2248,48 +2372,48 @@ export const applyBanToOwnedSkillsBatchInternal = internalMutation({ args, isDone, continueCursor, - ) + ); - return { ok: true as const, hiddenCount, scheduled: !isDone } + return { ok: true as const, hiddenCount, scheduled: !isDone }; }, -}) +}); export const restoreOwnedSkillsForUnbanBatchInternal = 
internalMutation({ args: { - ownerUserId: v.id('users'), + ownerUserId: v.id("users"), bannedAt: v.number(), cursor: v.optional(v.string()), }, handler: async (ctx, args) => { - const now = Date.now() + const now = Date.now(); const { page, isDone, continueCursor } = await ctx.db - .query('skills') - .withIndex('by_owner', (q) => q.eq('ownerUserId', args.ownerUserId)) - .order('desc') - .paginate({ cursor: args.cursor ?? null, numItems: BAN_USER_SKILLS_BATCH_SIZE }) + .query("skills") + .withIndex("by_owner", (q) => q.eq("ownerUserId", args.ownerUserId)) + .order("desc") + .paginate({ cursor: args.cursor ?? null, numItems: BAN_USER_SKILLS_BATCH_SIZE }); - let restoredCount = 0 + let restoredCount = 0; for (const skill of page) { if ( !skill.softDeletedAt || skill.softDeletedAt !== args.bannedAt || - skill.moderationReason !== 'user.banned' + skill.moderationReason !== "user.banned" ) { - continue + continue; } await ctx.db.patch(skill._id, { softDeletedAt: undefined, - moderationStatus: 'active', - moderationReason: 'restored.unban', + moderationStatus: "active", + moderationReason: "restored.unban", hiddenAt: undefined, hiddenBy: undefined, lastReviewedAt: now, updatedAt: now, - }) + }); - await setSkillEmbeddingsSoftDeleted(ctx, skill._id, false, now) - restoredCount += 1 + await setSkillEmbeddingsSoftDeleted(ctx, skill._id, false, now); + restoredCount += 1; } scheduleNextBatchIfNeeded( @@ -2298,11 +2422,11 @@ export const restoreOwnedSkillsForUnbanBatchInternal = internalMutation({ args, isDone, continueCursor, - ) + ); - return { ok: true as const, restoredCount, scheduled: !isDone } + return { ok: true as const, restoredCount, scheduled: !isDone }; }, -}) +}); /** * Get legacy skills that are active but still have "pending.scan" reason. 
@@ -2311,38 +2435,38 @@ export const restoreOwnedSkillsForUnbanBatchInternal = internalMutation({ export const getLegacyPendingScanSkillsInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 1000 + const limit = args.limit ?? 1000; const skills = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), 'active'), - q.eq(q.field('moderationReason'), 'pending.scan'), + q.eq(q.field("moderationStatus"), "active"), + q.eq(q.field("moderationReason"), "pending.scan"), ), ) - .take(limit) + .take(limit); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - hasHash: boolean - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + hasHash: boolean; + }> = []; for (const skill of skills) { - if (!skill.latestVersionId) continue - const version = await ctx.db.get(skill.latestVersionId) + if (!skill.latestVersionId) continue; + const version = await ctx.db.get(skill.latestVersionId); results.push({ skillId: skill._id, - versionId: version?._id ?? ('' as Id<'skillVersions'>), + versionId: version?._id ?? ("" as Id<"skillVersions">), slug: skill.slug, hasHash: Boolean(version?.sha256hash), - }) + }); } - return results + return results; }, -}) +}); /** * Get active skills that bypassed VT entirely (null moderationReason). @@ -2350,84 +2474,84 @@ export const getLegacyPendingScanSkillsInternal = internalQuery({ export const getUnscannedActiveSkillsInternal = internalQuery({ args: { limit: v.optional(v.number()) }, handler: async (ctx, args) => { - const limit = args.limit ?? 1000 + const limit = args.limit ?? 
1000; const skills = await ctx.db - .query('skills') + .query("skills") .filter((q) => q.and( - q.eq(q.field('moderationStatus'), 'active'), - q.eq(q.field('moderationReason'), undefined), + q.eq(q.field("moderationStatus"), "active"), + q.eq(q.field("moderationReason"), undefined), ), ) - .take(limit) + .take(limit); const results: Array<{ - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - }> = [] + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + }> = []; for (const skill of skills) { - if (skill.softDeletedAt) continue - if (!skill.latestVersionId) continue - const version = await ctx.db.get(skill.latestVersionId) + if (skill.softDeletedAt) continue; + if (!skill.latestVersionId) continue; + const version = await ctx.db.get(skill.latestVersionId); results.push({ skillId: skill._id, - versionId: version?._id ?? ('' as Id<'skillVersions'>), + versionId: version?._id ?? ("" as Id<"skillVersions">), slug: skill.slug, - }) + }); } - return results + return results; }, -}) +}); /** * Update scan tracking for a skill (called after each VT check) */ export const updateScanCheckInternal = internalMutation({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => { - const skill = await ctx.db.get(args.skillId) - if (!skill) return + const skill = await ctx.db.get(args.skillId); + if (!skill) return; await ctx.db.patch(args.skillId, { scanLastCheckedAt: Date.now(), scanCheckCount: (skill.scanCheckCount ?? 
0) + 1, - }) + }); }, -}) +}); /** * Mark a skill as stale after too many failed scan checks * TODO: Setup webhook/notification when skills are marked stale for manual review */ export const markScanStaleInternal = internalMutation({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => { - const skill = await ctx.db.get(args.skillId) - if (!skill) return + const skill = await ctx.db.get(args.skillId); + if (!skill) return; await ctx.db.patch(args.skillId, { - moderationReason: 'pending.scan.stale', + moderationReason: "pending.scan.stale", updatedAt: Date.now(), - }) + }); }, -}) +}); export const listVersionsInternal = internalQuery({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => { return await ctx.db - .query('skillVersions') - .withIndex('by_skill', (q) => q.eq('skillId', args.skillId)) - .collect() + .query("skillVersions") + .withIndex("by_skill", (q) => q.eq("skillId", args.skillId)) + .collect(); }, -}) +}); export const updateVersionScanResultsInternal = internalMutation({ args: { - versionId: v.id('skillVersions'), + versionId: v.id("skillVersions"), sha256hash: v.optional(v.string()), vtAnalysis: v.optional( v.object({ @@ -2440,26 +2564,26 @@ export const updateVersionScanResultsInternal = internalMutation({ ), }, handler: async (ctx, args) => { - const version = await ctx.db.get(args.versionId) - if (!version) return + const version = await ctx.db.get(args.versionId); + if (!version) return; - const patch: Partial> = {} + const patch: Partial> = {}; if (args.sha256hash !== undefined) { - patch.sha256hash = args.sha256hash + patch.sha256hash = args.sha256hash; } if (args.vtAnalysis !== undefined) { - patch.vtAnalysis = args.vtAnalysis + patch.vtAnalysis = args.vtAnalysis; } if (Object.keys(patch).length > 0) { - await ctx.db.patch(args.versionId, patch) + await ctx.db.patch(args.versionId, patch); } }, -}) +}); export const 
updateVersionLlmAnalysisInternal = internalMutation({ args: { - versionId: v.id('skillVersions'), + versionId: v.id("skillVersions"), llmAnalysis: v.object({ status: v.string(), verdict: v.optional(v.string()), @@ -2482,88 +2606,112 @@ export const updateVersionLlmAnalysisInternal = internalMutation({ }), }, handler: async (ctx, args) => { - const version = await ctx.db.get(args.versionId) - if (!version) return - await ctx.db.patch(args.versionId, { llmAnalysis: args.llmAnalysis }) + const version = await ctx.db.get(args.versionId); + if (!version) return; + await ctx.db.patch(args.versionId, { llmAnalysis: args.llmAnalysis }); }, -}) +}); export const approveSkillByHashInternal = internalMutation({ args: { sha256hash: v.string(), scanner: v.string(), status: v.string(), - moderationStatus: v.optional(v.union(v.literal('active'), v.literal('hidden'))), + moderationStatus: v.optional(v.union(v.literal("active"), v.literal("hidden"))), }, handler: async (ctx, args) => { const version = await ctx.db - .query('skillVersions') - .withIndex('by_sha256hash', (q) => q.eq('sha256hash', args.sha256hash)) - .unique() + .query("skillVersions") + .withIndex("by_sha256hash", (q) => q.eq("sha256hash", args.sha256hash)) + .unique(); - if (!version) throw new Error('Version not found for hash') + if (!version) throw new Error("Version not found for hash"); // Update the skill's moderation status based on scan result - const skill = await ctx.db.get(version.skillId) + const skill = await ctx.db.get(version.skillId); if (skill) { - const owner = skill.ownerUserId ? await ctx.db.get(skill.ownerUserId) : null - const isMalicious = args.status === 'malicious' - const isSuspicious = args.status === 'suspicious' - const isClean = !isMalicious && !isSuspicious + const owner = skill.ownerUserId ? 
await ctx.db.get(skill.ownerUserId) : null; + const isMalicious = args.status === "malicious"; + const isSuspicious = args.status === "suspicious"; + const isClean = !isMalicious && !isSuspicious; // Defense-in-depth: read existing flags to merge scanner results. // The stricter verdict always wins across scanners. - const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? [] - const existingReason: string | undefined = skill.moderationReason as string | undefined - const alreadyBlocked = existingFlags.includes('blocked.malware') - const alreadyFlagged = existingFlags.includes('flagged.suspicious') + const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? []; + const existingReason: string | undefined = skill.moderationReason as string | undefined; + const alreadyBlocked = existingFlags.includes("blocked.malware"); + const alreadyFlagged = existingFlags.includes("flagged.suspicious"); const bypassSuspicious = - isSuspicious && !alreadyBlocked && isPrivilegedOwnerForSuspiciousBypass(owner) + isSuspicious && !alreadyBlocked && isPrivilegedOwnerForSuspiciousBypass(owner); // Determine new flags based on multi-scanner merge - let newFlags: string[] | undefined + let newFlags: string[] | undefined; if (isMalicious || alreadyBlocked) { // Malicious from ANY scanner → blocked.malware (upgrade from suspicious) - newFlags = ['blocked.malware'] + newFlags = ["blocked.malware"]; } else if ((isSuspicious || alreadyFlagged) && !bypassSuspicious) { // Suspicious from ANY scanner → flagged.suspicious - newFlags = ['flagged.suspicious'] + newFlags = ["flagged.suspicious"]; } else if (isClean) { // Clean from this scanner — only clear if no other scanner has flagged const otherScannerFlagged = - existingReason?.startsWith('scanner.') && + existingReason?.startsWith("scanner.") && !existingReason.startsWith(`scanner.${args.scanner}.`) && - !existingReason.endsWith('.clean') && - !existingReason.endsWith('.pending') - newFlags = 
otherScannerFlagged ? existingFlags : undefined + !existingReason.endsWith(".clean") && + !existingReason.endsWith(".pending"); + newFlags = otherScannerFlagged ? existingFlags : undefined; } if (!alreadyBlocked && isPrivilegedOwnerForSuspiciousBypass(owner)) { - newFlags = stripSuspiciousFlag(newFlags ?? existingFlags) + newFlags = stripSuspiciousFlag(newFlags ?? existingFlags); } - const now = Date.now() - const qualityLocked = skill.moderationReason === 'quality.low' && !isMalicious - const nextModerationStatus = qualityLocked ? 'hidden' : 'active' + const now = Date.now(); + const qualityLocked = skill.moderationReason === "quality.low" && !isMalicious; + const nextModerationStatus = qualityLocked ? "hidden" : "active"; const nextModerationReason = qualityLocked - ? 'quality.low' + ? "quality.low" : bypassSuspicious ? `scanner.${args.scanner}.clean` - : `scanner.${args.scanner}.${args.status}` + : `scanner.${args.scanner}.${args.status}`; const nextModerationNotes = qualityLocked ? (skill.moderationNotes ?? - 'Quality gate quarantine is still active. Manual moderation review required.') - : undefined + "Quality gate quarantine is still active. Manual moderation review required.") + : undefined; + const scanner = args.scanner.trim().toLowerCase(); + const existingReasonCodes = (skill.moderationReasonCodes as string[] | undefined) ?? []; + const existingEvidence = (skill.moderationEvidence as ModerationFinding[] | undefined) ?? []; + const snapshot = buildModerationSnapshot({ + existingReasonCodes, + existingEvidence, + vtStatus: scanner === "vt" ? args.status : undefined, + llmStatus: scanner === "llm" ? args.status : undefined, + sourceVersionId: version._id, + }); + const nextReasonCodes = + bypassSuspicious && !isMalicious + ? 
snapshot.reasonCodes.filter((code) => !code.startsWith("suspicious.")) + : snapshot.reasonCodes; + const nextVerdict = verdictFromCodes(nextReasonCodes); + const nextLegacyFlags = legacyFlagsFromVerdict(nextVerdict); + const nextSummary = summarizeReasonCodes(nextReasonCodes); await ctx.db.patch(skill._id, { moderationStatus: nextModerationStatus, moderationReason: nextModerationReason, - moderationFlags: newFlags, + moderationFlags: newFlags ?? nextLegacyFlags, + moderationVerdict: nextVerdict, + moderationReasonCodes: nextReasonCodes.length ? nextReasonCodes : undefined, + moderationEvidence: snapshot.evidence.length ? snapshot.evidence : undefined, + moderationSummary: nextSummary, + moderationEngineVersion: snapshot.engineVersion, + moderationEvaluatedAt: snapshot.evaluatedAt, + moderationSourceVersionId: version._id, moderationNotes: nextModerationNotes, - hiddenAt: nextModerationStatus === 'hidden' ? now : undefined, + hiddenAt: nextModerationStatus === "hidden" ? now : undefined, hiddenBy: undefined, - lastReviewedAt: nextModerationStatus === 'hidden' ? now : undefined, + lastReviewedAt: nextModerationStatus === "hidden" ? 
now : undefined, updatedAt: now, - }) + }); // Auto-ban authors of malicious skills (skips moderators/admins) if (isMalicious && skill.ownerUserId) { @@ -2571,13 +2719,13 @@ export const approveSkillByHashInternal = internalMutation({ ownerUserId: skill.ownerUserId, sha256hash: args.sha256hash, slug: skill.slug, - }) + }); } } - return { ok: true, skillId: version.skillId, versionId: version._id } + return { ok: true, skillId: version.skillId, versionId: version._id }; }, -}) +}); /** * Lighter VT-only escalation: adds moderation flags and hides/bans for malicious, @@ -2586,52 +2734,73 @@ export const approveSkillByHashInternal = internalMutation({ export const escalateByVtInternal = internalMutation({ args: { sha256hash: v.string(), - status: v.union(v.literal('malicious'), v.literal('suspicious')), + status: v.union(v.literal("malicious"), v.literal("suspicious")), }, handler: async (ctx, args) => { const version = await ctx.db - .query('skillVersions') - .withIndex('by_sha256hash', (q) => q.eq('sha256hash', args.sha256hash)) - .unique() + .query("skillVersions") + .withIndex("by_sha256hash", (q) => q.eq("sha256hash", args.sha256hash)) + .unique(); - if (!version) throw new Error('Version not found for hash') + if (!version) throw new Error("Version not found for hash"); - const skill = await ctx.db.get(version.skillId) - if (!skill) return + const skill = await ctx.db.get(version.skillId); + if (!skill) return; - const isMalicious = args.status === 'malicious' - const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? [] - const alreadyBlocked = existingFlags.includes('blocked.malware') - const owner = skill.ownerUserId ? await ctx.db.get(skill.ownerUserId) : null + const isMalicious = args.status === "malicious"; + const existingFlags: string[] = (skill.moderationFlags as string[] | undefined) ?? []; + const alreadyBlocked = existingFlags.includes("blocked.malware"); + const owner = skill.ownerUserId ? 
await ctx.db.get(skill.ownerUserId) : null; const bypassSuspicious = - !isMalicious && !alreadyBlocked && isPrivilegedOwnerForSuspiciousBypass(owner) + !isMalicious && !alreadyBlocked && isPrivilegedOwnerForSuspiciousBypass(owner); // Determine new flags — stricter verdict always wins - let newFlags: string[] + let newFlags: string[]; if (isMalicious || alreadyBlocked) { - newFlags = ['blocked.malware'] + newFlags = ["blocked.malware"]; } else if (bypassSuspicious) { - newFlags = stripSuspiciousFlag(existingFlags) ?? [] + newFlags = stripSuspiciousFlag(existingFlags) ?? []; } else { - newFlags = ['flagged.suspicious'] + newFlags = ["flagged.suspicious"]; } const patch: Record = { moderationFlags: newFlags.length ? newFlags : undefined, updatedAt: Date.now(), - } + }; + const existingReasonCodes = (skill.moderationReasonCodes as string[] | undefined) ?? []; + const existingEvidence = (skill.moderationEvidence as ModerationFinding[] | undefined) ?? []; + const snapshot = buildModerationSnapshot({ + existingReasonCodes, + existingEvidence, + vtStatus: args.status, + sourceVersionId: version._id, + }); + const nextReasonCodes = + bypassSuspicious && !isMalicious + ? snapshot.reasonCodes.filter((code) => !code.startsWith("suspicious.")) + : snapshot.reasonCodes; + const nextVerdict = verdictFromCodes(nextReasonCodes); + patch.moderationVerdict = nextVerdict; + patch.moderationReasonCodes = nextReasonCodes.length ? nextReasonCodes : undefined; + patch.moderationEvidence = snapshot.evidence.length ? 
snapshot.evidence : undefined; + patch.moderationSummary = summarizeReasonCodes(nextReasonCodes); + patch.moderationEngineVersion = snapshot.engineVersion; + patch.moderationEvaluatedAt = snapshot.evaluatedAt; + patch.moderationSourceVersionId = version._id; + patch.moderationFlags = legacyFlagsFromVerdict(nextVerdict); if (bypassSuspicious) { patch.moderationReason = normalizeScannerSuspiciousReason( skill.moderationReason as string | undefined, - ) + ); } // Only hide for malicious — suspicious stays visible with a flag if (isMalicious) { - patch.moderationStatus = 'hidden' + patch.moderationStatus = "hidden"; } - await ctx.db.patch(skill._id, patch) + await ctx.db.patch(skill._id, patch); // Auto-ban authors of malicious skills if (isMalicious && skill.ownerUserId) { @@ -2639,24 +2808,24 @@ export const escalateByVtInternal = internalMutation({ ownerUserId: skill.ownerUserId, sha256hash: args.sha256hash, slug: skill.slug, - }) + }); } - return { ok: true, skillId: version.skillId, versionId: version._id } + return { ok: true, skillId: version.skillId, versionId: version._id }; }, -}) +}); export const getVersionBySkillAndVersion = query({ - args: { skillId: v.id('skills'), version: v.string() }, + args: { skillId: v.id("skills"), version: v.string() }, handler: async (ctx, args) => { return ctx.db - .query('skillVersions') - .withIndex('by_skill_version', (q) => - q.eq('skillId', args.skillId).eq('version', args.version), + .query("skillVersions") + .withIndex("by_skill_version", (q) => + q.eq("skillId", args.skillId).eq("version", args.version), ) - .unique() + .unique(); }, -}) +}); export const publishVersion: ReturnType = action({ args: { @@ -2675,17 +2844,17 @@ export const publishVersion: ReturnType = action({ v.object({ path: v.string(), size: v.number(), - storageId: v.id('_storage'), + storageId: v.id("_storage"), sha256: v.string(), contentType: v.optional(v.string()), }), ), }, handler: async (ctx, args): Promise => { - const { userId } = await 
requireUserFromAction(ctx) - return publishVersionForUser(ctx, userId, args) + const { userId } = await requireUserFromAction(ctx); + return publishVersionForUser(ctx, userId, args); }, -}) +}); export const generateChangelogPreview = action({ args: { @@ -2695,108 +2864,108 @@ export const generateChangelogPreview = action({ filePaths: v.optional(v.array(v.string())), }, handler: async (ctx, args) => { - await requireUserFromAction(ctx) + await requireUserFromAction(ctx); const changelog = await buildChangelogPreview(ctx, { slug: args.slug.trim().toLowerCase(), version: args.version.trim(), readmeText: args.readmeText, filePaths: args.filePaths?.map((value) => value.trim()).filter(Boolean), - }) - return { changelog, source: 'auto' as const } + }); + return { changelog, source: "auto" as const }; }, -}) +}); export const getReadme: ReturnType = action({ - args: { versionId: v.id('skillVersions') }, + args: { versionId: v.id("skillVersions") }, handler: async (ctx, args): Promise => { const version = (await ctx.runQuery(internal.skills.getVersionByIdInternal, { versionId: args.versionId, - })) as Doc<'skillVersions'> | null - if (!version) throw new ConvexError('Version not found') + })) as Doc<"skillVersions"> | null; + if (!version) throw new ConvexError("Version not found"); const readmeFile = version.files.find( - (file) => file.path.toLowerCase() === 'skill.md' || file.path.toLowerCase() === 'skills.md', - ) - if (!readmeFile) throw new ConvexError('SKILL.md not found') - const text = await fetchText(ctx, readmeFile.storageId) - return { path: readmeFile.path, text } + (file) => file.path.toLowerCase() === "skill.md" || file.path.toLowerCase() === "skills.md", + ); + if (!readmeFile) throw new ConvexError("SKILL.md not found"); + const text = await fetchText(ctx, readmeFile.storageId); + return { path: readmeFile.path, text }; }, -}) +}); export const getFileText: ReturnType = action({ - args: { versionId: v.id('skillVersions'), path: v.string() }, + args: { 
versionId: v.id("skillVersions"), path: v.string() }, handler: async (ctx, args): Promise => { const version = (await ctx.runQuery(internal.skills.getVersionByIdInternal, { versionId: args.versionId, - })) as Doc<'skillVersions'> | null - if (!version) throw new ConvexError('Version not found') + })) as Doc<"skillVersions"> | null; + if (!version) throw new ConvexError("Version not found"); - const normalizedPath = args.path.trim() - const normalizedLower = normalizedPath.toLowerCase() + const normalizedPath = args.path.trim(); + const normalizedLower = normalizedPath.toLowerCase(); const file = version.files.find((entry) => entry.path === normalizedPath) ?? - version.files.find((entry) => entry.path.toLowerCase() === normalizedLower) - if (!file) throw new ConvexError('File not found') + version.files.find((entry) => entry.path.toLowerCase() === normalizedLower); + if (!file) throw new ConvexError("File not found"); if (file.size > MAX_DIFF_FILE_BYTES) { - throw new ConvexError('File exceeds 200KB limit') + throw new ConvexError("File exceeds 200KB limit"); } - const text = await fetchText(ctx, file.storageId) - return { path: file.path, text, size: file.size, sha256: file.sha256 } + const text = await fetchText(ctx, file.storageId); + return { path: file.path, text, size: file.size, sha256: file.sha256 }; }, -}) +}); export const resolveVersionByHash = query({ args: { slug: v.string(), hash: v.string() }, handler: async (ctx, args) => { - const slug = args.slug.trim().toLowerCase() - const hash = args.hash.trim().toLowerCase() - if (!slug || !/^[a-f0-9]{64}$/.test(hash)) return null + const slug = args.slug.trim().toLowerCase(); + const hash = args.hash.trim().toLowerCase(); + if (!slug || !/^[a-f0-9]{64}$/.test(hash)) return null; const skill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', slug)) - .unique() - if (!skill || skill.softDeletedAt) return null + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", slug)) + 
.unique(); + if (!skill || skill.softDeletedAt) return null; - const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null + const latestVersion = skill.latestVersionId ? await ctx.db.get(skill.latestVersionId) : null; const fingerprintMatches = await ctx.db - .query('skillVersionFingerprints') - .withIndex('by_skill_fingerprint', (q) => q.eq('skillId', skill._id).eq('fingerprint', hash)) - .take(25) + .query("skillVersionFingerprints") + .withIndex("by_skill_fingerprint", (q) => q.eq("skillId", skill._id).eq("fingerprint", hash)) + .take(25); - let match: { version: string } | null = null + let match: { version: string } | null = null; if (fingerprintMatches.length > 0) { const newest = fingerprintMatches.reduce( (best, entry) => (entry.createdAt > best.createdAt ? entry : best), fingerprintMatches[0] as (typeof fingerprintMatches)[number], - ) - const version = await ctx.db.get(newest.versionId) + ); + const version = await ctx.db.get(newest.versionId); if (version && !version.softDeletedAt) { - match = { version: version.version } + match = { version: version.version }; } } if (!match) { const versions = await ctx.db - .query('skillVersions') - .withIndex('by_skill', (q) => q.eq('skillId', skill._id)) - .order('desc') - .take(200) + .query("skillVersions") + .withIndex("by_skill", (q) => q.eq("skillId", skill._id)) + .order("desc") + .take(200); for (const version of versions) { - if (version.softDeletedAt) continue - if (typeof version.fingerprint === 'string' && version.fingerprint === hash) { - match = { version: version.version } - break + if (version.softDeletedAt) continue; + if (typeof version.fingerprint === "string" && version.fingerprint === hash) { + match = { version: version.version }; + break; } const fingerprint = await hashSkillFiles( version.files.map((file) => ({ path: file.path, sha256: file.sha256 })), - ) + ); if (fingerprint === hash) { - match = { version: version.version } - break + match = { version: 
version.version }; + break; } } } @@ -2804,184 +2973,184 @@ export const resolveVersionByHash = query({ return { match, latestVersion: latestVersion ? { version: latestVersion.version } : null, - } + }; }, -}) +}); export const updateTags = mutation({ args: { - skillId: v.id('skills'), - tags: v.array(v.object({ tag: v.string(), versionId: v.id('skillVersions') })), + skillId: v.id("skills"), + tags: v.array(v.object({ tag: v.string(), versionId: v.id("skillVersions") })), }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); if (skill.ownerUserId !== user._id) { - assertModerator(user) + assertModerator(user); } - const nextTags = { ...skill.tags } + const nextTags = { ...skill.tags }; for (const entry of args.tags) { - nextTags[entry.tag] = entry.versionId + nextTags[entry.tag] = entry.versionId; } - const latestEntry = args.tags.find((entry) => entry.tag === 'latest') - const now = Date.now() + const latestEntry = args.tags.find((entry) => entry.tag === "latest"); + const now = Date.now(); await ctx.db.patch(skill._id, { tags: nextTags, latestVersionId: latestEntry ? 
latestEntry.versionId : skill.latestVersionId, updatedAt: now, - }) + }); if (latestEntry) { - await setSkillEmbeddingsLatestVersion(ctx, skill._id, latestEntry.versionId, now) + await setSkillEmbeddingsLatestVersion(ctx, skill._id, latestEntry.versionId, now); } }, -}) +}); export const setRedactionApproved = mutation({ - args: { skillId: v.id('skills'), approved: v.boolean() }, + args: { skillId: v.id("skills"), approved: v.boolean() }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) + const { user } = await requireUser(ctx); + assertAdmin(user); - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const now = Date.now() + const now = Date.now(); if (args.approved) { - await upsertSkillBadge(ctx, skill._id, 'redactionApproved', user._id, now) + await upsertSkillBadge(ctx, skill._id, "redactionApproved", user._id, now); } else { - await removeSkillBadge(ctx, skill._id, 'redactionApproved') + await removeSkillBadge(ctx, skill._id, "redactionApproved"); } await ctx.db.patch(skill._id, { lastReviewedAt: now, updatedAt: now, - }) + }); - await setSkillEmbeddingsApproved(ctx, skill._id, args.approved, now) + await setSkillEmbeddingsApproved(ctx, skill._id, args.approved, now); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: args.approved ? 'badge.set' : 'badge.unset', - targetType: 'skill', + action: args.approved ? 
"badge.set" : "badge.unset", + targetType: "skill", targetId: skill._id, - metadata: { badge: 'redactionApproved', approved: args.approved }, + metadata: { badge: "redactionApproved", approved: args.approved }, createdAt: now, - }) + }); }, -}) +}); export const setBatch = mutation({ - args: { skillId: v.id('skills'), batch: v.optional(v.string()) }, + args: { skillId: v.id("skills"), batch: v.optional(v.string()) }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') - const existingBadges = await getSkillBadgeMap(ctx, skill._id) - const previousHighlighted = isSkillHighlighted({ badges: existingBadges }) - const nextBatch = args.batch?.trim() || undefined - const nextHighlighted = nextBatch === 'highlighted' - const now = Date.now() + const { user } = await requireUser(ctx); + assertModerator(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); + const existingBadges = await getSkillBadgeMap(ctx, skill._id); + const previousHighlighted = isSkillHighlighted({ badges: existingBadges }); + const nextBatch = args.batch?.trim() || undefined; + const nextHighlighted = nextBatch === "highlighted"; + const now = Date.now(); if (nextHighlighted) { - await upsertSkillBadge(ctx, skill._id, 'highlighted', user._id, now) + await upsertSkillBadge(ctx, skill._id, "highlighted", user._id, now); } else { - await removeSkillBadge(ctx, skill._id, 'highlighted') + await removeSkillBadge(ctx, skill._id, "highlighted"); } await ctx.db.patch(skill._id, { batch: nextBatch, updatedAt: now, - }) - await ctx.db.insert('auditLogs', { + }); + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: 'badge.highlighted', - targetType: 'skill', + action: "badge.highlighted", + targetType: "skill", targetId: skill._id, metadata: { highlighted: nextHighlighted }, createdAt: now, - }) + }); if 
(nextHighlighted && !previousHighlighted) { - void queueHighlightedWebhook(ctx, skill._id) + void queueHighlightedWebhook(ctx, skill._id); } }, -}) +}); export const setSoftDeleted = mutation({ - args: { skillId: v.id('skills'), deleted: v.boolean() }, + args: { skillId: v.id("skills"), deleted: v.boolean() }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + assertModerator(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const now = Date.now() + const now = Date.now(); await ctx.db.patch(skill._id, { softDeletedAt: args.deleted ? now : undefined, - moderationStatus: args.deleted ? 'hidden' : 'active', + moderationStatus: args.deleted ? "hidden" : "active", hiddenAt: args.deleted ? now : undefined, hiddenBy: args.deleted ? user._id : undefined, lastReviewedAt: now, updatedAt: now, - }) + }); - await setSkillEmbeddingsSoftDeleted(ctx, skill._id, args.deleted, now) + await setSkillEmbeddingsSoftDeleted(ctx, skill._id, args.deleted, now); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: args.deleted ? 'skill.delete' : 'skill.undelete', - targetType: 'skill', + action: args.deleted ? "skill.delete" : "skill.undelete", + targetType: "skill", targetId: skill._id, metadata: { slug: skill.slug, softDeletedAt: args.deleted ? 
now : null }, createdAt: now, - }) + }); }, -}) +}); export const changeOwner = mutation({ - args: { skillId: v.id('skills'), ownerUserId: v.id('users') }, + args: { skillId: v.id("skills"), ownerUserId: v.id("users") }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + assertAdmin(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const nextOwner = await ctx.db.get(args.ownerUserId) + const nextOwner = await ctx.db.get(args.ownerUserId); if (!nextOwner || nextOwner.deletedAt || nextOwner.deactivatedAt) - throw new Error('User not found') + throw new Error("User not found"); - if (skill.ownerUserId === args.ownerUserId) return + if (skill.ownerUserId === args.ownerUserId) return; - const now = Date.now() + const now = Date.now(); await ctx.db.patch(skill._id, { ownerUserId: args.ownerUserId, lastReviewedAt: now, updatedAt: now, - }) + }); - const embeddings = await listSkillEmbeddingsForSkill(ctx, skill._id) + const embeddings = await listSkillEmbeddingsForSkill(ctx, skill._id); for (const embedding of embeddings) { await ctx.db.patch(embedding._id, { ownerId: args.ownerUserId, updatedAt: now, - }) + }); } - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: 'skill.owner.change', - targetType: 'skill', + action: "skill.owner.change", + targetType: "skill", targetId: skill._id, metadata: { from: skill.ownerUserId, to: args.ownerUserId }, createdAt: now, - }) + }); }, -}) +}); /** * Admin-only: reclaim a squatted slug by hard-deleting the squatter's skill @@ -2990,42 +3159,42 @@ export const changeOwner = mutation({ export const reclaimSlug = mutation({ args: { slug: v.string(), - rightfulOwnerUserId: v.id('users'), + rightfulOwnerUserId: v.id("users"), reason: 
v.optional(v.string()), }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) + const { user } = await requireUser(ctx); + assertAdmin(user); - const slug = args.slug.trim().toLowerCase() - if (!slug) throw new Error('Slug required') + const slug = args.slug.trim().toLowerCase(); + if (!slug) throw new Error("Slug required"); - const rightfulOwner = await ctx.db.get(args.rightfulOwnerUserId) - if (!rightfulOwner) throw new Error('Rightful owner not found') + const rightfulOwner = await ctx.db.get(args.rightfulOwnerUserId); + if (!rightfulOwner) throw new Error("Rightful owner not found"); - const now = Date.now() + const now = Date.now(); // Check if slug is currently occupied by someone else const existingSkill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', slug)) - .unique() + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", slug)) + .unique(); if (existingSkill) { if (existingSkill.ownerUserId === args.rightfulOwnerUserId) { - return { ok: true as const, action: 'already_owned' } + return { ok: true as const, action: "already_owned" }; } // Hard-delete the squatter's skill await ctx.scheduler.runAfter(0, internal.skills.hardDeleteInternal, { skillId: existingSkill._id, actorUserId: user._id, - }) + }); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: 'slug.reclaim', - targetType: 'skill', + action: "slug.reclaim", + targetType: "skill", targetId: existingSkill._id, metadata: { slug, @@ -3034,7 +3203,7 @@ export const reclaimSlug = mutation({ reason: args.reason || undefined, }, createdAt: now, - }) + }); } await upsertReservedSlugForRightfulOwner(ctx, { @@ -3042,46 +3211,46 @@ export const reclaimSlug = mutation({ rightfulOwnerUserId: args.rightfulOwnerUserId, deletedAt: now, expiresAt: now + SLUG_RESERVATION_MS, - reason: args.reason || 'slug.reclaimed', - }) + reason: args.reason || "slug.reclaimed", + }); return { ok: 
true as const, - action: existingSkill ? 'reclaimed_from_squatter' : 'reserved', - } + action: existingSkill ? "reclaimed_from_squatter" : "reserved", + }; }, -}) +}); /** * Admin-only: reclaim slugs in bulk. Useful for recovering multiple squatted slugs at once. */ export const reclaimSlugInternal = internalMutation({ args: { - actorUserId: v.id('users'), + actorUserId: v.id("users"), slug: v.string(), - rightfulOwnerUserId: v.id('users'), + rightfulOwnerUserId: v.id("users"), reason: v.optional(v.string()), }, handler: async (ctx, args) => { - const actor = await ctx.db.get(args.actorUserId) - if (!actor || actor.deletedAt || actor.deactivatedAt) throw new Error('User not found') - assertAdmin(actor) + const actor = await ctx.db.get(args.actorUserId); + if (!actor || actor.deletedAt || actor.deactivatedAt) throw new Error("User not found"); + assertAdmin(actor); - const slug = args.slug.trim().toLowerCase() - if (!slug) throw new Error('Slug required') + const slug = args.slug.trim().toLowerCase(); + if (!slug) throw new Error("Slug required"); - const now = Date.now() + const now = Date.now(); const existingSkill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', slug)) - .unique() + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", slug)) + .unique(); if (existingSkill && existingSkill.ownerUserId !== args.rightfulOwnerUserId) { await ctx.scheduler.runAfter(0, internal.skills.hardDeleteInternal, { skillId: existingSkill._id, actorUserId: args.actorUserId, - }) + }); } await upsertReservedSlugForRightfulOwner(ctx, { @@ -3089,37 +3258,39 @@ export const reclaimSlugInternal = internalMutation({ rightfulOwnerUserId: args.rightfulOwnerUserId, deletedAt: now, expiresAt: now + SLUG_RESERVATION_MS, - reason: args.reason || 'slug.reclaimed', - }) + reason: args.reason || "slug.reclaimed", + }); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: args.actorUserId, - action: 'slug.reclaim', - 
targetType: 'slug', + action: "slug.reclaim", + targetType: "slug", targetId: slug, metadata: { slug, rightfulOwnerUserId: args.rightfulOwnerUserId, - hadSquatter: Boolean(existingSkill && existingSkill.ownerUserId !== args.rightfulOwnerUserId), + hadSquatter: Boolean( + existingSkill && existingSkill.ownerUserId !== args.rightfulOwnerUserId, + ), reason: args.reason || undefined, }, createdAt: now, - }) + }); - return { ok: true as const } + return { ok: true as const }; }, -}) +}); export const setDuplicate = mutation({ - args: { skillId: v.id('skills'), canonicalSlug: v.optional(v.string()) }, + args: { skillId: v.id("skills"), canonicalSlug: v.optional(v.string()) }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertModerator(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + assertModerator(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const now = Date.now() - const canonicalSlug = args.canonicalSlug?.trim().toLowerCase() + const now = Date.now(); + const canonicalSlug = args.canonicalSlug?.trim().toLowerCase(); if (!canonicalSlug) { await ctx.db.patch(skill._id, { @@ -3127,146 +3298,146 @@ export const setDuplicate = mutation({ forkOf: undefined, lastReviewedAt: now, updatedAt: now, - }) - await ctx.db.insert('auditLogs', { + }); + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: 'skill.duplicate.clear', - targetType: 'skill', + action: "skill.duplicate.clear", + targetType: "skill", targetId: skill._id, metadata: { canonicalSlug: null }, createdAt: now, - }) - return + }); + return; } const canonical = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', canonicalSlug)) - .unique() - if (!canonical) throw new Error('Canonical skill not found') - if (canonical._id === skill._id) throw new Error('Cannot duplicate a skill onto 
itself') + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", canonicalSlug)) + .unique(); + if (!canonical) throw new Error("Canonical skill not found"); + if (canonical._id === skill._id) throw new Error("Cannot duplicate a skill onto itself"); const canonicalVersion = canonical.latestVersionId ? await ctx.db.get(canonical.latestVersionId) - : null + : null; await ctx.db.patch(skill._id, { canonicalSkillId: canonical._id, forkOf: { skillId: canonical._id, - kind: 'duplicate', + kind: "duplicate", version: canonicalVersion?.version, at: now, }, lastReviewedAt: now, updatedAt: now, - }) + }); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: 'skill.duplicate.set', - targetType: 'skill', + action: "skill.duplicate.set", + targetType: "skill", targetId: skill._id, metadata: { canonicalSlug }, createdAt: now, - }) + }); }, -}) +}); export const setOfficialBadge = mutation({ - args: { skillId: v.id('skills'), official: v.boolean() }, + args: { skillId: v.id("skills"), official: v.boolean() }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + assertAdmin(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const now = Date.now() + const now = Date.now(); if (args.official) { - await upsertSkillBadge(ctx, skill._id, 'official', user._id, now) + await upsertSkillBadge(ctx, skill._id, "official", user._id, now); } else { - await removeSkillBadge(ctx, skill._id, 'official') + await removeSkillBadge(ctx, skill._id, "official"); } await ctx.db.patch(skill._id, { lastReviewedAt: now, updatedAt: now, - }) + }); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: args.official ? 
'badge.official.set' : 'badge.official.unset', - targetType: 'skill', + action: args.official ? "badge.official.set" : "badge.official.unset", + targetType: "skill", targetId: skill._id, metadata: { official: args.official }, createdAt: now, - }) + }); }, -}) +}); export const setDeprecatedBadge = mutation({ - args: { skillId: v.id('skills'), deprecated: v.boolean() }, + args: { skillId: v.id("skills"), deprecated: v.boolean() }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') + const { user } = await requireUser(ctx); + assertAdmin(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); - const now = Date.now() + const now = Date.now(); if (args.deprecated) { - await upsertSkillBadge(ctx, skill._id, 'deprecated', user._id, now) + await upsertSkillBadge(ctx, skill._id, "deprecated", user._id, now); } else { - await removeSkillBadge(ctx, skill._id, 'deprecated') + await removeSkillBadge(ctx, skill._id, "deprecated"); } await ctx.db.patch(skill._id, { lastReviewedAt: now, updatedAt: now, - }) + }); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: user._id, - action: args.deprecated ? 'badge.deprecated.set' : 'badge.deprecated.unset', - targetType: 'skill', + action: args.deprecated ? 
"badge.deprecated.set" : "badge.deprecated.unset", + targetType: "skill", targetId: skill._id, metadata: { deprecated: args.deprecated }, createdAt: now, - }) + }); }, -}) +}); export const hardDelete = mutation({ - args: { skillId: v.id('skills') }, + args: { skillId: v.id("skills") }, handler: async (ctx, args) => { - const { user } = await requireUser(ctx) - assertAdmin(user) - const skill = await ctx.db.get(args.skillId) - if (!skill) throw new Error('Skill not found') - await hardDeleteSkillStep(ctx, skill, user._id, 'versions') + const { user } = await requireUser(ctx); + assertAdmin(user); + const skill = await ctx.db.get(args.skillId); + if (!skill) throw new Error("Skill not found"); + await hardDeleteSkillStep(ctx, skill, user._id, "versions"); }, -}) +}); export const hardDeleteInternal = internalMutation({ - args: { skillId: v.id('skills'), actorUserId: v.id('users'), phase: v.optional(v.string()) }, + args: { skillId: v.id("skills"), actorUserId: v.id("users"), phase: v.optional(v.string()) }, handler: async (ctx, args) => { - const actor = await ctx.db.get(args.actorUserId) - if (!actor || actor.deletedAt || actor.deactivatedAt) throw new Error('User not found') - assertAdmin(actor) - const skill = await ctx.db.get(args.skillId) - if (!skill) return - const phase = isHardDeletePhase(args.phase) ? args.phase : 'versions' - await hardDeleteSkillStep(ctx, skill, actor._id, phase) + const actor = await ctx.db.get(args.actorUserId); + if (!actor || actor.deletedAt || actor.deactivatedAt) throw new Error("User not found"); + assertAdmin(actor); + const skill = await ctx.db.get(args.skillId); + if (!skill) return; + const phase = isHardDeletePhase(args.phase) ? 
args.phase : "versions"; + await hardDeleteSkillStep(ctx, skill, actor._id, phase); }, -}) +}); export const insertVersion = internalMutation({ args: { - userId: v.id('users'), + userId: v.id("users"), slug: v.string(), displayName: v.string(), version: v.string(), changelog: v.string(), - changelogSource: v.optional(v.union(v.literal('auto'), v.literal('user'))), + changelogSource: v.optional(v.union(v.literal("auto"), v.literal("user"))), tags: v.optional(v.array(v.string())), fingerprint: v.string(), bypassNewSkillRateLimit: v.optional(v.boolean()), @@ -3280,7 +3451,7 @@ export const insertVersion = internalMutation({ v.object({ path: v.string(), size: v.number(), - storageId: v.id('_storage'), + storageId: v.id("_storage"), sha256: v.string(), contentType: v.optional(v.string()), }), @@ -3293,10 +3464,10 @@ export const insertVersion = internalMutation({ summary: v.optional(v.string()), qualityAssessment: v.optional( v.object({ - decision: v.union(v.literal('pass'), v.literal('quarantine'), v.literal('reject')), + decision: v.union(v.literal("pass"), v.literal("quarantine"), v.literal("reject")), score: v.number(), reason: v.string(), - trustTier: v.union(v.literal('low'), v.literal('medium'), v.literal('trusted')), + trustTier: v.union(v.literal("low"), v.literal("medium"), v.literal("trusted")), similarRecentCount: v.number(), signals: v.object({ bodyChars: v.number(), @@ -3310,32 +3481,51 @@ export const insertVersion = internalMutation({ }), }), ), + staticScan: v.optional( + v.object({ + status: v.union(v.literal("clean"), v.literal("suspicious"), v.literal("malicious")), + reasonCodes: v.array(v.string()), + findings: v.array( + v.object({ + code: v.string(), + severity: v.union(v.literal("info"), v.literal("warn"), v.literal("critical")), + file: v.string(), + line: v.number(), + message: v.string(), + evidence: v.string(), + }), + ), + summary: v.string(), + engineVersion: v.string(), + checkedAt: v.number(), + }), + ), embedding: v.array(v.number()), 
}, handler: async (ctx, args) => { - const userId = args.userId - const user = await ctx.db.get(userId) - if (!user || user.deletedAt || user.deactivatedAt) throw new Error('User not found') + const userId = args.userId; + const user = await ctx.db.get(userId); + if (!user || user.deletedAt || user.deactivatedAt) throw new Error("User not found"); - const now = Date.now() + const now = Date.now(); let skill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', args.slug)) - .unique() + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", args.slug)) + .unique(); if (skill && skill.ownerUserId !== userId) { // Fallback: Convex Auth can create duplicate `users` records. Heal ownership ONLY // when the underlying GitHub identity matches (authAccounts.providerAccountId). - const owner = await ctx.db.get(skill.ownerUserId) + const owner = await ctx.db.get(skill.ownerUserId); if (!owner || owner.deletedAt || owner.deactivatedAt) { - throw new Error('Only the owner can publish updates') + throw new Error("Only the owner can publish updates"); } const [ownerProviderAccountId, callerProviderAccountId] = await Promise.all([ getGitHubProviderAccountId(ctx, skill.ownerUserId), getGitHubProviderAccountId(ctx, userId), - ]) + ]); // Deny healing when GitHub identity isn't present/consistent. 
if ( @@ -3344,28 +3534,29 @@ export const insertVersion = internalMutation({ callerProviderAccountId, ) ) { - throw new Error('Only the owner can publish updates') + throw new Error("Only the owner can publish updates"); } - await ctx.db.patch(skill._id, { ownerUserId: userId, updatedAt: now }) - skill = { ...skill, ownerUserId: userId } + await ctx.db.patch(skill._id, { ownerUserId: userId, updatedAt: now }); + skill = { ...skill, ownerUserId: userId }; } - const qualityAssessment = args.qualityAssessment - const isQualityQuarantine = qualityAssessment?.decision === 'quarantine' + const qualityAssessment = args.qualityAssessment; + const isQualityQuarantine = qualityAssessment?.decision === "quarantine"; // Trusted publishers (and moderators/admins) bypass auto-hide for pending scans. // Keep moderationReason as pending.scan so the VT poller keeps working. const isTrustedPublisher = Boolean( - user.trustedPublisher || user.role === 'admin' || user.role === 'moderator', - ) + user.trustedPublisher || user.role === "admin" || user.role === "moderator", + ); const initialModerationStatus = - isTrustedPublisher && !isQualityQuarantine ? 'active' : 'hidden' + isTrustedPublisher && !isQualityQuarantine ? "active" : "hidden"; - const moderationReason = isQualityQuarantine ? 'quality.low' : 'pending.scan' + const moderationReason = isQualityQuarantine ? "quality.low" : "pending.scan"; const moderationNotes = isQualityQuarantine ? `Auto-quarantined by quality gate (score=${qualityAssessment.score}, tier=${qualityAssessment.trustTier}, similar=${qualityAssessment.similarRecentCount}).` - : undefined + : undefined; + const staticSnapshot = buildModerationSnapshot({ staticScan: args.staticScan }); const qualityRecord = qualityAssessment ? { @@ -3377,63 +3568,66 @@ export const insertVersion = internalMutation({ signals: qualityAssessment.signals, evaluatedAt: now, } - : undefined + : undefined; if (!skill) { // Anti-squatting: enforce reserved slug cooldown. 
- await enforceReservedSlugCooldownForNewSkill(ctx, { slug: args.slug, userId, now }) + await enforceReservedSlugCooldownForNewSkill(ctx, { slug: args.slug, userId, now }); if (!args.bypassNewSkillRateLimit) { - const ownerTrustSignals = await getOwnerTrustSignals(ctx, user, now) - enforceNewSkillRateLimit(ownerTrustSignals) + const ownerTrustSignals = await getOwnerTrustSignals(ctx, user, now); + enforceNewSkillRateLimit(ownerTrustSignals); } - const forkOfSlug = args.forkOf?.slug.trim().toLowerCase() || '' - const forkOfVersion = args.forkOf?.version?.trim() || undefined + const forkOfSlug = args.forkOf?.slug.trim().toLowerCase() || ""; + const forkOfVersion = args.forkOf?.version?.trim() || undefined; - let canonicalSkillId: Id<'skills'> | undefined + let canonicalSkillId: Id<"skills"> | undefined; let forkOf: | { - skillId: Id<'skills'> - kind: 'fork' | 'duplicate' - version?: string - at: number + skillId: Id<"skills">; + kind: "fork" | "duplicate"; + version?: string; + at: number; } - | undefined + | undefined; if (forkOfSlug) { const upstream = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', forkOfSlug)) - .unique() - if (!upstream || upstream.softDeletedAt) throw new Error('Upstream skill not found') - canonicalSkillId = upstream.canonicalSkillId ?? upstream._id + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", forkOfSlug)) + .unique(); + if (!upstream || upstream.softDeletedAt) throw new Error("Upstream skill not found"); + canonicalSkillId = upstream.canonicalSkillId ?? upstream._id; forkOf = { skillId: upstream._id, - kind: 'fork', + kind: "fork", version: forkOfVersion, at: now, - } + }; } else { - const match = await findCanonicalSkillForFingerprint(ctx, args.fingerprint) + const match = await findCanonicalSkillForFingerprint(ctx, args.fingerprint); if (match) { - canonicalSkillId = match.canonicalSkillId ?? match._id + canonicalSkillId = match.canonicalSkillId ?? 
match._id; forkOf = { skillId: match._id, - kind: 'duplicate', + kind: "duplicate", at: now, - } + }; } } - const summary = args.summary ?? getFrontmatterValue(args.parsed.frontmatter, 'description') - const summaryValue = summary ?? undefined - const moderationFlags = deriveModerationFlags({ + const summary = args.summary ?? getFrontmatterValue(args.parsed.frontmatter, "description"); + const summaryValue = summary ?? undefined; + const derivedFlags = deriveModerationFlags({ skill: { slug: args.slug, displayName: args.displayName, summary: summaryValue }, parsed: args.parsed, files: args.files, - }) - const skillId = await ctx.db.insert('skills', { + }); + const moderationFlags = Array.from( + new Set([...(derivedFlags ?? []), ...(staticSnapshot.legacyFlags ?? [])]), + ); + const skillId = await ctx.db.insert("skills", { slug: args.slug, displayName: args.displayName, summary: summaryValue, @@ -3452,6 +3646,15 @@ export const insertVersion = internalMutation({ moderationStatus: initialModerationStatus, moderationReason, moderationNotes, + moderationVerdict: staticSnapshot.verdict, + moderationReasonCodes: staticSnapshot.reasonCodes.length + ? staticSnapshot.reasonCodes + : undefined, + moderationEvidence: staticSnapshot.evidence.length ? staticSnapshot.evidence : undefined, + moderationSummary: staticSnapshot.summary, + moderationEngineVersion: staticSnapshot.engineVersion, + moderationEvaluatedAt: staticSnapshot.evaluatedAt, + moderationSourceVersionId: undefined, quality: qualityRecord, moderationFlags: moderationFlags.length ? 
moderationFlags : undefined, reportCount: 0, @@ -3470,21 +3673,21 @@ export const insertVersion = internalMutation({ }, createdAt: now, updatedAt: now, - }) - skill = await ctx.db.get(skillId) + }); + skill = await ctx.db.get(skillId); } - if (!skill) throw new Error('Skill creation failed') + if (!skill) throw new Error("Skill creation failed"); const existingVersion = await ctx.db - .query('skillVersions') - .withIndex('by_skill_version', (q) => q.eq('skillId', skill._id).eq('version', args.version)) - .unique() + .query("skillVersions") + .withIndex("by_skill_version", (q) => q.eq("skillId", skill._id).eq("version", args.version)) + .unique(); if (existingVersion) { - throw new Error('Version already exists') + throw new Error("Version already exists"); } - const versionId = await ctx.db.insert('skillVersions', { + const versionId = await ctx.db.insert("skillVersions", { skillId: skill._id, version: args.version, fingerprint: args.fingerprint, @@ -3492,26 +3695,34 @@ export const insertVersion = internalMutation({ changelogSource: args.changelogSource, files: args.files, parsed: args.parsed, + staticScan: args.staticScan, createdBy: userId, createdAt: now, softDeletedAt: undefined, - }) + }); - const nextTags: Record> = { ...skill.tags } - nextTags.latest = versionId + const nextTags: Record> = { ...skill.tags }; + nextTags.latest = versionId; for (const tag of args.tags ?? []) { - nextTags[tag] = versionId + nextTags[tag] = versionId; } - const latestBefore = skill.latestVersionId + const latestBefore = skill.latestVersionId; const nextSummary = - args.summary ?? getFrontmatterValue(args.parsed.frontmatter, 'description') ?? skill.summary - const moderationFlags = deriveModerationFlags({ + args.summary ?? getFrontmatterValue(args.parsed.frontmatter, "description") ?? skill.summary; + const derivedFlags = deriveModerationFlags({ skill: { slug: skill.slug, displayName: args.displayName, summary: nextSummary ?? 
undefined }, parsed: args.parsed, files: args.files, - }) + }); + const moderationSnapshot = buildModerationSnapshot({ + staticScan: args.staticScan, + sourceVersionId: versionId, + }); + const moderationFlags = Array.from( + new Set([...(derivedFlags ?? []), ...(moderationSnapshot.legacyFlags ?? [])]), + ); await ctx.db.patch(skill._id, { displayName: args.displayName, @@ -3523,15 +3734,26 @@ export const insertVersion = internalMutation({ moderationStatus: initialModerationStatus, moderationReason, moderationNotes, + moderationVerdict: moderationSnapshot.verdict, + moderationReasonCodes: moderationSnapshot.reasonCodes.length + ? moderationSnapshot.reasonCodes + : undefined, + moderationEvidence: moderationSnapshot.evidence.length + ? moderationSnapshot.evidence + : undefined, + moderationSummary: moderationSnapshot.summary, + moderationEngineVersion: moderationSnapshot.engineVersion, + moderationEvaluatedAt: moderationSnapshot.evaluatedAt, + moderationSourceVersionId: versionId, quality: qualityRecord ?? skill.quality, moderationFlags: moderationFlags.length ? 
moderationFlags : undefined, updatedAt: now, - }) + }); - const badgeMap = await getSkillBadgeMap(ctx, skill._id) - const isApproved = Boolean(badgeMap.redactionApproved) + const badgeMap = await getSkillBadgeMap(ctx, skill._id); + const isApproved = Boolean(badgeMap.redactionApproved); - const embeddingId = await ctx.db.insert('skillEmbeddings', { + const embeddingId = await ctx.db.insert("skillEmbeddings", { skillId: skill._id, versionId, ownerId: userId, @@ -3540,100 +3762,100 @@ export const insertVersion = internalMutation({ isApproved, visibility: embeddingVisibilityFor(true, isApproved), updatedAt: now, - }) + }); if (latestBefore) { const previousEmbedding = await ctx.db - .query('skillEmbeddings') - .withIndex('by_version', (q) => q.eq('versionId', latestBefore)) - .unique() + .query("skillEmbeddings") + .withIndex("by_version", (q) => q.eq("versionId", latestBefore)) + .unique(); if (previousEmbedding) { await ctx.db.patch(previousEmbedding._id, { isLatest: false, visibility: embeddingVisibilityFor(false, previousEmbedding.isApproved), updatedAt: now, - }) + }); } } - await ctx.db.insert('skillVersionFingerprints', { + await ctx.db.insert("skillVersionFingerprints", { skillId: skill._id, versionId, fingerprint: args.fingerprint, createdAt: now, - }) + }); - return { skillId: skill._id, versionId, embeddingId } + return { skillId: skill._id, versionId, embeddingId }; }, -}) +}); export const setSkillSoftDeletedInternal = internalMutation({ args: { - userId: v.id('users'), + userId: v.id("users"), slug: v.string(), deleted: v.boolean(), }, handler: async (ctx, args) => { - const user = await ctx.db.get(args.userId) - if (!user || user.deletedAt || user.deactivatedAt) throw new Error('User not found') + const user = await ctx.db.get(args.userId); + if (!user || user.deletedAt || user.deactivatedAt) throw new Error("User not found"); - const slug = args.slug.trim().toLowerCase() - if (!slug) throw new Error('Slug required') + const slug = 
args.slug.trim().toLowerCase(); + if (!slug) throw new Error("Slug required"); const skill = await ctx.db - .query('skills') - .withIndex('by_slug', (q) => q.eq('slug', slug)) - .unique() - if (!skill) throw new Error('Skill not found') + .query("skills") + .withIndex("by_slug", (q) => q.eq("slug", slug)) + .unique(); + if (!skill) throw new Error("Skill not found"); if (skill.ownerUserId !== args.userId) { - assertModerator(user) + assertModerator(user); } - const now = Date.now() + const now = Date.now(); await ctx.db.patch(skill._id, { softDeletedAt: args.deleted ? now : undefined, - moderationStatus: args.deleted ? 'hidden' : 'active', + moderationStatus: args.deleted ? "hidden" : "active", hiddenAt: args.deleted ? now : undefined, hiddenBy: args.deleted ? args.userId : undefined, lastReviewedAt: now, updatedAt: now, - }) + }); - await setSkillEmbeddingsSoftDeleted(ctx, skill._id, args.deleted, now) + await setSkillEmbeddingsSoftDeleted(ctx, skill._id, args.deleted, now); - await ctx.db.insert('auditLogs', { + await ctx.db.insert("auditLogs", { actorUserId: args.userId, - action: args.deleted ? 'skill.delete' : 'skill.undelete', - targetType: 'skill', + action: args.deleted ? "skill.delete" : "skill.undelete", + targetType: "skill", targetId: skill._id, metadata: { slug, softDeletedAt: args.deleted ? now : null }, createdAt: now, - }) + }); - return { ok: true as const } + return { ok: true as const }; }, -}) +}); function clampInt(value: number, min: number, max: number) { - const rounded = Number.isFinite(value) ? Math.round(value) : min - return Math.min(max, Math.max(min, rounded)) + const rounded = Number.isFinite(value) ? 
Math.round(value) : min; + return Math.min(max, Math.max(min, rounded)); } async function findCanonicalSkillForFingerprint( - ctx: { db: MutationCtx['db'] }, + ctx: { db: MutationCtx["db"] }, fingerprint: string, ) { const matches = await ctx.db - .query('skillVersionFingerprints') - .withIndex('by_fingerprint', (q) => q.eq('fingerprint', fingerprint)) - .take(25) + .query("skillVersionFingerprints") + .withIndex("by_fingerprint", (q) => q.eq("fingerprint", fingerprint)) + .take(25); for (const entry of matches) { - const skill = await ctx.db.get(entry.skillId) - if (!skill || skill.softDeletedAt) continue - return skill + const skill = await ctx.db.get(entry.skillId); + if (!skill || skill.softDeletedAt) continue; + return skill; } - return null + return null; } diff --git a/convex/vt.ts b/convex/vt.ts index 2838bcb87..2f8a35de0 100644 --- a/convex/vt.ts +++ b/convex/vt.ts @@ -1,8 +1,8 @@ -import { v } from 'convex/values' -import { internal } from './_generated/api' -import type { Id } from './_generated/dataModel' -import { action, internalAction, internalMutation } from './_generated/server' -import { buildDeterministicZip } from './lib/skillZip' +import { v } from "convex/values"; +import type { Id } from "./_generated/dataModel"; +import { internal } from "./_generated/api"; +import { action, internalAction, internalMutation } from "./_generated/server"; +import { buildDeterministicZip } from "./lib/skillZip"; /** * Fix skills that have version.vtAnalysis but null skill.moderationReason. @@ -11,51 +11,51 @@ import { buildDeterministicZip } from './lib/skillZip' export const fixNullModerationReasons = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 
100; const skills: UnscannedActiveSkill[] = await ctx.runQuery( internal.skills.getUnscannedActiveSkillsInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:fixNull] No skills with null reason found') - return { total: 0, fixed: 0, noVtAnalysis: 0 } + console.log("[vt:fixNull] No skills with null reason found"); + return { total: 0, fixed: 0, noVtAnalysis: 0 }; } - console.log(`[vt:fixNull] Checking ${skills.length} skills with null moderationReason`) + console.log(`[vt:fixNull] Checking ${skills.length} skills with null moderationReason`); - let fixed = 0 - let noVtAnalysis = 0 + let fixed = 0; + let noVtAnalysis = 0; for (const { versionId, slug } of skills) { - if (!versionId) continue + if (!versionId) continue; - const version = await ctx.runQuery(internal.skills.getVersionByIdInternal, { versionId }) + const version = await ctx.runQuery(internal.skills.getVersionByIdInternal, { versionId }); if (!version?.vtAnalysis || !version.sha256hash) { - noVtAnalysis++ - continue + noVtAnalysis++; + continue; } // Version has vtAnalysis - update the skill's moderationReason - const status = version.vtAnalysis.status + const status = version.vtAnalysis.status; await ctx.runMutation(internal.skills.approveSkillByHashInternal, { sha256hash: version.sha256hash, - scanner: 'vt', + scanner: "vt", status, - }) - fixed++ - console.log(`[vt:fixNull] Fixed ${slug} -> ${status}`) + }); + fixed++; + console.log(`[vt:fixNull] Fixed ${slug} -> ${status}`); } - const result: FixNullModerationReasonsResult = { total: skills.length, fixed, noVtAnalysis } - console.log('[vt:fixNull] Complete:', result) - return result + const result: FixNullModerationReasonsResult = { total: skills.length, fixed, noVtAnalysis }; + console.log("[vt:fixNull] Complete:", result); + return result; }, -}) +}); export const logScanResultInternal = internalMutation({ args: { - type: v.union(v.literal('daily_rescan'), v.literal('backfill'), v.literal('pending_poll')), + type: 
v.union(v.literal("daily_rescan"), v.literal("backfill"), v.literal("pending_poll")), total: v.number(), updated: v.number(), unchanged: v.number(), @@ -71,7 +71,7 @@ export const logScanResultInternal = internalMutation({ durationMs: v.number(), }, handler: async (ctx, args) => { - await ctx.db.insert('vtScanLogs', { + await ctx.db.insert("vtScanLogs", { type: args.type, total: args.total, updated: args.updated, @@ -80,153 +80,161 @@ export const logScanResultInternal = internalMutation({ flaggedSkills: args.flaggedSkills, durationMs: args.durationMs, createdAt: Date.now(), - }) + }); }, -}) +}); -const BENIGN_VERDICTS = new Set(['benign', 'clean']) -const MALICIOUS_VERDICTS = new Set(['malicious']) -const SUSPICIOUS_VERDICTS = new Set(['suspicious']) +const BENIGN_VERDICTS = new Set(["benign", "clean"]); +const MALICIOUS_VERDICTS = new Set(["malicious"]); +const SUSPICIOUS_VERDICTS = new Set(["suspicious"]); function normalizeVerdict(value?: string) { - return value?.trim().toLowerCase() ?? '' + return value?.trim().toLowerCase() ?? 
""; } function verdictToStatus(verdict: string) { - if (BENIGN_VERDICTS.has(verdict)) return 'clean' - if (MALICIOUS_VERDICTS.has(verdict)) return 'malicious' - if (SUSPICIOUS_VERDICTS.has(verdict)) return 'suspicious' - return 'pending' + if (BENIGN_VERDICTS.has(verdict)) return "clean"; + if (MALICIOUS_VERDICTS.has(verdict)) return "malicious"; + if (SUSPICIOUS_VERDICTS.has(verdict)) return "suspicious"; + return "pending"; } type VTAIResult = { - category: string - verdict: string - analysis?: string - source?: string -} + category: string; + verdict: string; + analysis?: string; + source?: string; +}; type VTFileResponse = { data: { attributes: { - sha256: string - crowdsourced_ai_results?: VTAIResult[] + sha256: string; + crowdsourced_ai_results?: VTAIResult[]; last_analysis_stats?: { - malicious: number - suspicious: number - undetected: number - harmless: number - } - } - } -} + malicious: number; + suspicious: number; + undetected: number; + harmless: number; + }; + }; + }; +}; type ScanQueueHealth = { - queueSize: number - staleCount: number - veryStaleCount: number - oldestAgeMinutes: number - healthy: boolean -} + queueSize: number; + staleCount: number; + veryStaleCount: number; + oldestAgeMinutes: number; + healthy: boolean; +}; type PendingScanSkill = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> | null - sha256hash: string | null - checkCount: number -} + skillId: Id<"skills">; + versionId: Id<"skillVersions"> | null; + sha256hash: string | null; + checkCount: number; +}; type PollPendingScansResult = { - processed: number - updated: number - staled?: number - healthy: boolean - queueSize?: number -} + processed: number; + updated: number; + staled?: number; + healthy: boolean; + queueSize?: number; +}; type BackfillPendingScansResult = | { - total: number - updated: number - rescansRequested: number - noHash: number - notInVT: number - errors: number - remaining: number + total: number; + updated: number; + rescansRequested: number; + 
noHash: number; + notInVT: number; + errors: number; + remaining: number; } - | { error: string } + | { error: string }; type UnscannedActiveSkill = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; +}; type LegacyPendingScanSkill = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - hasHash: boolean -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + hasHash: boolean; +}; type ActiveSkillsMissingVTCache = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - sha256hash: string - slug: string -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + sha256hash: string; + slug: string; +}; type PendingVTSkill = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - sha256hash: string -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + sha256hash: string; +}; type NullModerationStatusSkill = { - skillId: Id<'skills'> - slug: string - moderationReason: string | undefined -} + skillId: Id<"skills">; + slug: string; + moderationReason: string | undefined; +}; type StaleModerationReasonSkill = { - skillId: Id<'skills'> - versionId: Id<'skillVersions'> - slug: string - currentReason: string - vtStatus: string | null -} + skillId: Id<"skills">; + versionId: Id<"skillVersions">; + slug: string; + currentReason: string; + vtStatus: string | null; +}; type FixNullModerationReasonsResult = { - total: number - fixed: number - noVtAnalysis: number -} + total: number; + fixed: number; + noVtAnalysis: number; +}; type ScanUnscannedSkillsResult = | { total: number; scanned: number; errors: number; durationMs?: number } - | { error: string } + | { error: string }; type ScanLegacySkillsResult = | { total: number; scanned: number; errors: number; alreadyHasHash?: number; durationMs?: number } - | { error: string } + | { error: string }; type 
BackfillActiveSkillsVTCacheResult = | { total: number; updated: number; noResults: number; errors: number; done: boolean } - | { error: string } + | { error: string }; type RequestReanalysisForPendingResult = | { total: number; requested: number; errors?: number; done: boolean } - | { error: string } + | { error: string }; -type FixNullModerationStatusResult = { total: number; fixed: number; done: boolean } +type FixNullModerationStatusResult = { total: number; fixed: number; done: boolean }; type SyncModerationReasonsResult = { - total: number - synced: number - noVtAnalysis: number - done: boolean -} + total: number; + synced: number; + noVtAnalysis: number; + done: boolean; +}; + +type BackfillModerationV2Result = { + total: number; + updated: number; + errors: number; + durationMs?: number; + done?: boolean; +}; export const fetchResults = action({ args: { @@ -234,55 +242,55 @@ export const fetchResults = action({ }, handler: async (_ctx, args) => { if (!args.sha256hash) { - return { status: 'not_found' } + return { status: "not_found" }; } - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - return { status: 'error', message: 'VT_API_KEY not configured' } + return { status: "error", message: "VT_API_KEY not configured" }; } try { const response = await fetch(`https://www.virustotal.com/api/v3/files/${args.sha256hash}`, { - method: 'GET', + method: "GET", headers: { - 'x-apikey': apiKey, + "x-apikey": apiKey, }, - }) + }); if (response.status === 404) { - return { status: 'not_found' } + return { status: "not_found" }; } if (!response.ok) { - return { status: 'error' } + return { status: "error" }; } - const data = (await response.json()) as VTFileResponse + const data = (await response.json()) as VTFileResponse; const aiResult = data.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); - const stats = 
data.data.attributes.last_analysis_stats - let status = 'pending' + const stats = data.data.attributes.last_analysis_stats; + let status = "pending"; if (aiResult?.verdict) { // Prioritize AI Analysis (Code Insight) - status = verdictToStatus(normalizeVerdict(aiResult.verdict)) + status = verdictToStatus(normalizeVerdict(aiResult.verdict)); } else if (stats) { // Fallback to AV engines if (stats.malicious > 0) { - status = 'malicious' + status = "malicious"; } else if (stats.suspicious > 0) { - status = 'suspicious' + status = "suspicious"; } else if (stats.harmless > 0) { - status = 'clean' + status = "clean"; } } return { status, - source: aiResult?.verdict ? 'code_insight' : 'engines', + source: aiResult?.verdict ? "code_insight" : "engines", url: `https://www.virustotal.com/gui/file/${args.sha256hash}`, metadata: { aiVerdict: aiResult?.verdict, @@ -290,57 +298,57 @@ export const fetchResults = action({ aiSource: aiResult?.source, stats: stats, }, - } + }; } catch (error) { - console.error('Error fetching VT results:', error) - return { status: 'error' } + console.error("Error fetching VT results:", error); + return { status: "error" }; } }, -}) +}); export const scanWithVirusTotal = internalAction({ args: { - versionId: v.id('skillVersions'), + versionId: v.id("skillVersions"), }, handler: async (ctx, args) => { - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('VT_API_KEY not configured, skipping scan') - return + console.log("VT_API_KEY not configured, skipping scan"); + return; } // Get the version details and files const version = await ctx.runQuery(internal.skills.getVersionByIdInternal, { versionId: args.versionId, - }) + }); if (!version) { - console.error(`Version ${args.versionId} not found for scanning`) - return + console.error(`Version ${args.versionId} not found for scanning`); + return; } // Fetch skill info for _meta.json const skill = await 
ctx.runQuery(internal.skills.getSkillByIdInternal, { skillId: version.skillId, - }) + }); if (!skill) { - console.error(`Skill ${version.skillId} not found for scanning`) - return + console.error(`Skill ${version.skillId} not found for scanning`); + return; } // Build deterministic ZIP with stable meta (no version history). - const entries: Array<{ path: string; bytes: Uint8Array }> = [] + const entries: Array<{ path: string; bytes: Uint8Array }> = []; for (const file of version.files) { - const content = await ctx.storage.get(file.storageId) + const content = await ctx.storage.get(file.storageId); if (content) { - const buffer = new Uint8Array(await content.arrayBuffer()) - entries.push({ path: file.path, bytes: buffer }) + const buffer = new Uint8Array(await content.arrayBuffer()); + entries.push({ path: file.path, bytes: buffer }); } } if (entries.length === 0) { - console.warn(`No files found for version ${args.versionId}, skipping scan`) - return + console.warn(`No files found for version ${args.versionId}, skipping scan`); + return; } const zipArray = buildDeterministicZip(entries, { @@ -348,36 +356,36 @@ export const scanWithVirusTotal = internalAction({ slug: skill.slug, version: version.version, publishedAt: version.createdAt, - }) + }); // Calculate SHA-256 of the ZIP (this hash includes _meta.json) - const hashBuffer = await crypto.subtle.digest('SHA-256', zipArray) + const hashBuffer = await crypto.subtle.digest("SHA-256", zipArray); const sha256hash = Array.from(new Uint8Array(hashBuffer)) - .map((b) => b.toString(16).padStart(2, '0')) - .join('') + .map((b) => b.toString(16).padStart(2, "0")) + .join(""); // Update version with hash await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId: args.versionId, sha256hash, - }) + }); // Check if file already exists in VT and has AI analysis try { - const existingFile = await checkExistingFile(apiKey, sha256hash) + const existingFile = await checkExistingFile(apiKey, sha256hash); 
if (existingFile) { const aiResult = existingFile.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); if (aiResult) { // File exists and has AI analysis - use the verdict - const verdict = normalizeVerdict(aiResult.verdict) - const status = verdictToStatus(verdict) + const verdict = normalizeVerdict(aiResult.verdict); + const status = verdictToStatus(verdict); console.log( `Version ${args.versionId} found in VT with AI analysis. Hash: ${sha256hash}. Verdict: ${verdict}`, - ) + ); // Cache VT analysis in version await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { @@ -389,62 +397,62 @@ export const scanWithVirusTotal = internalAction({ source: aiResult.source, checkedAt: Date.now(), }, - }) + }); // VT finalizes moderation visibility for newly published versions. await ctx.runMutation(internal.skills.approveSkillByHashInternal, { sha256hash, - scanner: 'vt', + scanner: "vt", status, - }) - return + }); + return; } // File exists but no AI analysis - need to upload for fresh scan console.log( `Version ${args.versionId} found in VT but no AI analysis. Hash: ${sha256hash}. Uploading...`, - ) + ); } else { - console.log(`Version ${args.versionId} not found in VT. Hash: ${sha256hash}. Uploading...`) + console.log(`Version ${args.versionId} not found in VT. Hash: ${sha256hash}. 
Uploading...`); } } catch (error) { - console.error('Error checking existing file in VT:', error) + console.error("Error checking existing file in VT:", error); // Continue to upload even if check fails } // Upload file to VirusTotal (v3 API) - const formData = new FormData() - const blob = new Blob([zipArray], { type: 'application/zip' }) - formData.append('file', blob, 'skill.zip') + const formData = new FormData(); + const blob = new Blob([zipArray], { type: "application/zip" }); + formData.append("file", blob, "skill.zip"); try { - const response = await fetch('https://www.virustotal.com/api/v3/files', { - method: 'POST', + const response = await fetch("https://www.virustotal.com/api/v3/files", { + method: "POST", headers: { - 'x-apikey': apiKey, + "x-apikey": apiKey, }, body: formData, - }) + }); if (!response.ok) { - const error = await response.text() - console.error('VirusTotal upload error:', error) - return + const error = await response.text(); + console.error("VirusTotal upload error:", error); + return; } - const result = (await response.json()) as { data: { id: string } } + const result = (await response.json()) as { data: { id: string } }; console.log( `Successfully uploaded version ${args.versionId} to VT. Hash: ${sha256hash}. Analysis ID: ${result.data.id}`, - ) + ); // Don't set moderation state to scanner.vt.pending here — the LLM eval // runs concurrently and will set the initial moderation state. VT only // updates moderation when it has an actual verdict (clean/suspicious/malicious). 
} catch (error) { - console.error('Failed to upload to VirusTotal:', error) + console.error("Failed to upload to VirusTotal:", error); } }, -}) +}); /** * Poll for pending scans and update skill moderation status @@ -455,24 +463,24 @@ export const pollPendingScans = internalAction({ batchSize: v.optional(v.number()), }, handler: async (ctx, args): Promise => { - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:pollPendingScans] VT_API_KEY not configured, skipping') - return { processed: 0, updated: 0, healthy: false } + console.log("[vt:pollPendingScans] VT_API_KEY not configured, skipping"); + return { processed: 0, updated: 0, healthy: false }; } - const batchSize = args.batchSize ?? 10 + const batchSize = args.batchSize ?? 10; // Check queue health // TODO: Setup webhook/notification (Slack, Discord, email) when queue is unhealthy const health: ScanQueueHealth = await ctx.runQuery( internal.skills.getScanQueueHealthInternal, {}, - ) + ); if (!health.healthy) { console.warn( `[vt:pollPendingScans] QUEUE UNHEALTHY: ${health.queueSize} pending, ${health.veryStaleCount} stale >24h, oldest ${health.oldestAgeMinutes}m`, - ) + ); } // Get skills pending scan (randomized selection) @@ -481,86 +489,86 @@ export const pollPendingScans = internalAction({ { limit: batchSize, }, - ) + ); if (pendingSkills.length === 0) { - return { processed: 0, updated: 0, healthy: health.healthy, queueSize: health.queueSize } + return { processed: 0, updated: 0, healthy: health.healthy, queueSize: health.queueSize }; } console.log( `[vt:pollPendingScans] Checking ${pendingSkills.length} pending skills (queue: ${health.queueSize})`, - ) + ); - const MAX_CHECK_COUNT = 10 // After this many checks, mark as stale + const MAX_CHECK_COUNT = 10; // After this many checks, mark as stale - let updated = 0 - let staled = 0 + let updated = 0; + let staled = 0; for (const { skillId, versionId, sha256hash, checkCount } of pendingSkills) { 
if (!versionId) { - console.log(`[vt:pollPendingScans] Skill ${skillId} missing versionId, skipping`) - continue + console.log(`[vt:pollPendingScans] Skill ${skillId} missing versionId, skipping`); + continue; } if (!sha256hash) { console.log( `[vt:pollPendingScans] Skill ${skillId} version ${versionId} has no hash, skipping`, - ) - continue + ); + continue; } // Track this check attempt - await ctx.runMutation(internal.skills.updateScanCheckInternal, { skillId }) + await ctx.runMutation(internal.skills.updateScanCheckInternal, { skillId }); try { - const vtResult = await checkExistingFile(apiKey, sha256hash) + const vtResult = await checkExistingFile(apiKey, sha256hash); if (!vtResult) { - console.log(`[vt:pollPendingScans] Hash ${sha256hash} not found in VT yet`) + console.log(`[vt:pollPendingScans] Hash ${sha256hash} not found in VT yet`); // Check if we've exceeded max attempts — write stale vtAnalysis so it // drops out of the poll query without overwriting LLM moderationReason if (checkCount + 1 >= MAX_CHECK_COUNT) { console.warn( `[vt:pollPendingScans] Skill ${skillId} exceeded max checks, marking stale`, - ) + ); await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId, - vtAnalysis: { status: 'stale', checkedAt: Date.now() }, - }) - staled++ + vtAnalysis: { status: "stale", checkedAt: Date.now() }, + }); + staled++; } - continue + continue; } const aiResult = vtResult.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); if (!aiResult) { // No Code Insight - trigger a rescan to get it console.log( `[vt:pollPendingScans] Hash ${sha256hash} has no Code Insight, requesting rescan`, - ) - await requestRescan(apiKey, sha256hash) + ); + await requestRescan(apiKey, sha256hash); // Check if we've exceeded max attempts — write stale vtAnalysis so it // drops out of the poll query without overwriting LLM moderationReason if (checkCount + 1 >= MAX_CHECK_COUNT) 
{ console.warn( `[vt:pollPendingScans] Skill ${skillId} exceeded max checks, marking stale`, - ) + ); await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId, - vtAnalysis: { status: 'stale', checkedAt: Date.now() }, - }) - staled++ + vtAnalysis: { status: "stale", checkedAt: Date.now() }, + }); + staled++; } - continue + continue; } // We have a verdict - update the skill - const verdict = normalizeVerdict(aiResult.verdict) - const status = verdictToStatus(verdict) + const verdict = normalizeVerdict(aiResult.verdict); + const status = verdictToStatus(verdict); console.log( `[vt:pollPendingScans] Hash ${sha256hash} verdict: ${verdict} -> status: ${status}`, - ) + ); // Cache VT analysis in version await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { @@ -572,32 +580,32 @@ export const pollPendingScans = internalAction({ source: aiResult.source, checkedAt: Date.now(), }, - }) + }); // VT finalizes moderation visibility for newly published versions. 
await ctx.runMutation(internal.skills.approveSkillByHashInternal, { sha256hash, - scanner: 'vt', + scanner: "vt", status, - }) - updated++ + }); + updated++; } catch (error) { - console.error(`[vt:pollPendingScans] Error checking hash ${sha256hash}:`, error) + console.error(`[vt:pollPendingScans] Error checking hash ${sha256hash}:`, error); } } console.log( `[vt:pollPendingScans] Processed ${pendingSkills.length}, updated ${updated}, staled ${staled}`, - ) + ); return { processed: pendingSkills.length, updated, staled, healthy: health.healthy, queueSize: health.queueSize, - } + }; }, -}) +}); /** * Check if a file already exists in VirusTotal by hash @@ -607,23 +615,23 @@ async function checkExistingFile( sha256hash: string, ): Promise { const response = await fetch(`https://www.virustotal.com/api/v3/files/${sha256hash}`, { - method: 'GET', + method: "GET", headers: { - 'x-apikey': apiKey, + "x-apikey": apiKey, }, - }) + }); if (response.status === 404) { // File not found in VT - return null + return null; } if (!response.ok) { - const error = await response.text() - throw new Error(`VT API error: ${response.status} - ${error}`) + const error = await response.text(); + throw new Error(`VT API error: ${response.status} - ${error}`); } - return (await response.json()) as VTFileResponse + return (await response.json()) as VTFileResponse; } /** @@ -632,21 +640,21 @@ async function checkExistingFile( async function requestRescan(apiKey: string, sha256hash: string): Promise { try { const response = await fetch(`https://www.virustotal.com/api/v3/files/${sha256hash}/analyse`, { - method: 'POST', + method: "POST", headers: { - 'x-apikey': apiKey, + "x-apikey": apiKey, }, - }) + }); if (!response.ok) { - console.error(`[vt:requestRescan] Failed for ${sha256hash}: ${response.status}`) - return false + console.error(`[vt:requestRescan] Failed for ${sha256hash}: ${response.status}`); + return false; } - return true + return true; } catch (error) { - 
console.error(`[vt:requestRescan] Error for ${sha256hash}:`, error) - return false + console.error(`[vt:requestRescan] Error for ${sha256hash}:`, error); + return false; } } @@ -659,13 +667,13 @@ export const backfillPendingScans = internalAction({ triggerRescans: v.optional(v.boolean()), }, handler: async (ctx, args): Promise => { - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:backfill] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:backfill] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const triggerRescans = args.triggerRescans ?? true + const triggerRescans = args.triggerRescans ?? true; // Get ALL pending skills (no limit) const pendingSkills: PendingScanSkill[] = await ctx.runQuery( @@ -673,55 +681,55 @@ export const backfillPendingScans = internalAction({ { limit: 10000, }, - ) + ); - console.log(`[vt:backfill] Found ${pendingSkills.length} pending skills`) + console.log(`[vt:backfill] Found ${pendingSkills.length} pending skills`); - let updated = 0 - let rescansRequested = 0 - let noHash = 0 - let notInVT = 0 - let errors = 0 + let updated = 0; + let rescansRequested = 0; + let noHash = 0; + let notInVT = 0; + let errors = 0; for (const { sha256hash } of pendingSkills) { if (!sha256hash) { - noHash++ - continue + noHash++; + continue; } try { - const vtResult = await checkExistingFile(apiKey, sha256hash) + const vtResult = await checkExistingFile(apiKey, sha256hash); if (!vtResult) { - notInVT++ - continue + notInVT++; + continue; } const aiResult = vtResult.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); if (!aiResult) { if (triggerRescans) { - await requestRescan(apiKey, sha256hash) - rescansRequested++ + await requestRescan(apiKey, sha256hash); + rescansRequested++; } - continue + continue; } // We have a verdict - 
update the skill - const verdict = normalizeVerdict(aiResult.verdict) - const status = verdictToStatus(verdict) + const verdict = normalizeVerdict(aiResult.verdict); + const status = verdictToStatus(verdict); await ctx.runMutation(internal.skills.approveSkillByHashInternal, { sha256hash, - scanner: 'vt', + scanner: "vt", status, - }) - updated++ + }); + updated++; } catch (error) { - console.error(`[vt:backfill] Error for ${sha256hash}:`, error) - errors++ + console.error(`[vt:backfill] Error for ${sha256hash}:`, error); + errors++; } } @@ -733,12 +741,12 @@ export const backfillPendingScans = internalAction({ notInVT, errors, remaining: pendingSkills.length - updated, - } + }; - console.log('[vt:backfill] Complete:', result) - return result + console.log("[vt:backfill] Complete:", result); + return result; }, -}) +}); /** * Daily re-scan of ALL active skills to detect verdict changes. @@ -758,62 +766,62 @@ export const rescanActiveSkills = internalAction({ startTime: v.optional(v.number()), }, handler: async (ctx, args) => { - const startTime = args.startTime ?? Date.now() - const apiKey = process.env.VT_API_KEY + const startTime = args.startTime ?? Date.now(); + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:rescan] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:rescan] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const batchSize = args.batchSize ?? 100 - const cursor = args.cursor ?? 0 - let accTotal = args.accTotal ?? 0 - let accUpdated = args.accUpdated ?? 0 - let accUnchanged = args.accUnchanged ?? 0 - let accErrors = args.accErrors ?? 0 - const accFlaggedSkills = [...(args.accFlaggedSkills ?? [])] + const batchSize = args.batchSize ?? 100; + const cursor = args.cursor ?? 0; + let accTotal = args.accTotal ?? 0; + let accUpdated = args.accUpdated ?? 0; + let accUnchanged = args.accUnchanged ?? 0; + let accErrors = args.accErrors ?? 
0; + const accFlaggedSkills = [...(args.accFlaggedSkills ?? [])]; const batch = await ctx.runQuery(internal.skills.getActiveSkillBatchForRescanInternal, { cursor, batchSize, - }) + }); if (batch.skills.length === 0 && accTotal === 0) { - console.log('[vt:rescan] No active skills to re-scan') - return { total: 0, updated: 0, unchanged: 0, errors: 0 } + console.log("[vt:rescan] No active skills to re-scan"); + return { total: 0, updated: 0, unchanged: 0, errors: 0 }; } console.log( `[vt:rescan] Processing batch of ${batch.skills.length} skills (cursor=${cursor}, accumulated=${accTotal})`, - ) + ); for (const { versionId, sha256hash, slug } of batch.skills) { try { - const vtResult = await checkExistingFile(apiKey, sha256hash) + const vtResult = await checkExistingFile(apiKey, sha256hash); if (!vtResult) { - accErrors++ - continue + accErrors++; + continue; } const aiResult = vtResult.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); if (!aiResult) { await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId, vtAnalysis: { - status: 'pending', + status: "pending", checkedAt: Date.now(), }, - }) - accUnchanged++ - continue + }); + accUnchanged++; + continue; } - const verdict = normalizeVerdict(aiResult.verdict) - const status = verdictToStatus(verdict) + const verdict = normalizeVerdict(aiResult.verdict); + const status = verdictToStatus(verdict); await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId, @@ -824,32 +832,32 @@ export const rescanActiveSkills = internalAction({ source: aiResult.source, checkedAt: Date.now(), }, - }) + }); - if (status === 'malicious' || status === 'suspicious') { - console.warn(`[vt:rescan] ${slug}: verdict changed to ${status}!`) - accFlaggedSkills.push({ slug, status }) + if (status === "malicious" || status === "suspicious") { + console.warn(`[vt:rescan] ${slug}: verdict changed to ${status}!`); + 
accFlaggedSkills.push({ slug, status }); await ctx.runMutation(internal.skills.escalateByVtInternal, { sha256hash, status, - }) - accUpdated++ + }); + accUpdated++; } else { - accUnchanged++ + accUnchanged++; } } catch (error) { - console.error(`[vt:rescan] Error for ${slug}:`, error) - accErrors++ + console.error(`[vt:rescan] Error for ${slug}:`, error); + accErrors++; } } - accTotal += batch.skills.length + accTotal += batch.skills.length; if (!batch.done) { // Schedule next batch console.log( `[vt:rescan] Scheduling next batch (cursor=${batch.nextCursor}, total so far=${accTotal})`, - ) + ); await ctx.scheduler.runAfter(0, internal.vt.rescanActiveSkills, { cursor: batch.nextCursor, batchSize, @@ -859,22 +867,22 @@ export const rescanActiveSkills = internalAction({ accErrors, accFlaggedSkills: accFlaggedSkills.length > 0 ? accFlaggedSkills : undefined, startTime, - }) - return { status: 'continuing', totalSoFar: accTotal } + }); + return { status: "continuing", totalSoFar: accTotal }; } // Final batch — log results - const durationMs = Date.now() - startTime + const durationMs = Date.now() - startTime; await ctx.runMutation(internal.vt.logScanResultInternal, { - type: 'daily_rescan', + type: "daily_rescan", total: accTotal, updated: accUpdated, unchanged: accUnchanged, errors: accErrors, flaggedSkills: accFlaggedSkills.length > 0 ? accFlaggedSkills : undefined, durationMs, - }) + }); const result = { total: accTotal, @@ -882,11 +890,11 @@ export const rescanActiveSkills = internalAction({ unchanged: accUnchanged, errors: accErrors, durationMs, - } - console.log('[vt:rescan] Complete:', result) - return result + }; + console.log("[vt:rescan] Complete:", result); + return result; }, -}) +}); /** * Scan all unscanned skills (active with null moderationReason). 
@@ -895,61 +903,61 @@ export const rescanActiveSkills = internalAction({ export const scanUnscannedSkills = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const startTime = Date.now() - const apiKey = process.env.VT_API_KEY + const startTime = Date.now(); + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:scanUnscanned] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:scanUnscanned] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const batchSize = args.batchSize ?? 50 + const batchSize = args.batchSize ?? 50; const skills: UnscannedActiveSkill[] = await ctx.runQuery( internal.skills.getUnscannedActiveSkillsInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:scanUnscanned] No unscanned skills found') - return { total: 0, scanned: 0, errors: 0 } + console.log("[vt:scanUnscanned] No unscanned skills found"); + return { total: 0, scanned: 0, errors: 0 }; } - console.log(`[vt:scanUnscanned] Scanning ${skills.length} unscanned skills`) + console.log(`[vt:scanUnscanned] Scanning ${skills.length} unscanned skills`); - let scanned = 0 - let errors = 0 + let scanned = 0; + let errors = 0; for (const { versionId, slug } of skills) { if (!versionId) { - errors++ - continue + errors++; + continue; } try { - await ctx.runAction(internal.vt.scanWithVirusTotal, { versionId }) - scanned++ - console.log(`[vt:scanUnscanned] Scanned ${slug} (${scanned}/${skills.length})`) + await ctx.runAction(internal.vt.scanWithVirusTotal, { versionId }); + scanned++; + console.log(`[vt:scanUnscanned] Scanned ${slug} (${scanned}/${skills.length})`); } catch (error) { - console.error(`[vt:scanUnscanned] Error scanning ${slug}:`, error) - errors++ + console.error(`[vt:scanUnscanned] Error scanning ${slug}:`, error); + errors++; } } - const durationMs = Date.now() - startTime + const durationMs = Date.now() - 
startTime; await ctx.runMutation(internal.vt.logScanResultInternal, { - type: 'backfill', + type: "backfill", total: skills.length, updated: scanned, unchanged: 0, errors, durationMs, - }) + }); - const result: ScanUnscannedSkillsResult = { total: skills.length, scanned, errors, durationMs } - console.log('[vt:scanUnscanned] Complete:', result) - return result + const result: ScanUnscannedSkillsResult = { total: skills.length, scanned, errors, durationMs }; + console.log("[vt:scanUnscanned] Complete:", result); + return result; }, -}) +}); /** * Scan all legacy skills (active but still have pending.scan reason). @@ -958,62 +966,62 @@ export const scanUnscannedSkills = internalAction({ export const scanLegacySkills = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const startTime = Date.now() - const apiKey = process.env.VT_API_KEY + const startTime = Date.now(); + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:scanLegacy] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:scanLegacy] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 
100; const skills: LegacyPendingScanSkill[] = await ctx.runQuery( internal.skills.getLegacyPendingScanSkillsInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:scanLegacy] No legacy skills to scan') - return { total: 0, scanned: 0, errors: 0 } + console.log("[vt:scanLegacy] No legacy skills to scan"); + return { total: 0, scanned: 0, errors: 0 }; } - console.log(`[vt:scanLegacy] Scanning ${skills.length} legacy skills`) + console.log(`[vt:scanLegacy] Scanning ${skills.length} legacy skills`); - let scanned = 0 - let alreadyHasHash = 0 - let errors = 0 + let scanned = 0; + let alreadyHasHash = 0; + let errors = 0; for (const { versionId, slug, hasHash } of skills) { if (!versionId) { - errors++ - continue + errors++; + continue; } try { if (hasHash) { // Already has hash, just need to check VT and update reason - alreadyHasHash++ + alreadyHasHash++; } // Trigger VT scan (will upload if needed, check for results) - await ctx.runAction(internal.vt.scanWithVirusTotal, { versionId }) - scanned++ - console.log(`[vt:scanLegacy] Scanned ${slug} (${scanned}/${skills.length})`) + await ctx.runAction(internal.vt.scanWithVirusTotal, { versionId }); + scanned++; + console.log(`[vt:scanLegacy] Scanned ${slug} (${scanned}/${skills.length})`); } catch (error) { - console.error(`[vt:scanLegacy] Error scanning ${slug}:`, error) - errors++ + console.error(`[vt:scanLegacy] Error scanning ${slug}:`, error); + errors++; } } - const durationMs = Date.now() - startTime + const durationMs = Date.now() - startTime; await ctx.runMutation(internal.vt.logScanResultInternal, { - type: 'backfill', + type: "backfill", total: skills.length, updated: scanned, unchanged: alreadyHasHash, errors, durationMs, - }) + }); const result: ScanLegacySkillsResult = { total: skills.length, @@ -1021,11 +1029,11 @@ export const scanLegacySkills = internalAction({ alreadyHasHash, errors, durationMs, - } - console.log('[vt:scanLegacy] Complete:', result) - return result + }; + 
console.log("[vt:scanLegacy] Complete:", result); + return result; }, -}) +}); /** * Backfill vtAnalysis for active skills that have VT results but no cached data. @@ -1035,52 +1043,52 @@ export const scanLegacySkills = internalAction({ export const backfillActiveSkillsVTCache = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:backfillActive] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:backfillActive] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 100; const skills: ActiveSkillsMissingVTCache[] = await ctx.runQuery( internal.skills.getActiveSkillsMissingVTCacheInternal, { limit: batchSize }, - ) + ); - console.log(`[vt:backfillActive] Found ${skills.length} active skills missing VT cache`) + console.log(`[vt:backfillActive] Found ${skills.length} active skills missing VT cache`); if (skills.length === 0) { - return { total: 0, updated: 0, noResults: 0, errors: 0, done: true } + return { total: 0, updated: 0, noResults: 0, errors: 0, done: true }; } - let updated = 0 - let noResults = 0 - let errors = 0 + let updated = 0; + let noResults = 0; + let errors = 0; for (const { versionId, sha256hash, slug } of skills) { try { - const vtResult = await checkExistingFile(apiKey, sha256hash) + const vtResult = await checkExistingFile(apiKey, sha256hash); if (!vtResult) { - console.log(`[vt:backfillActive] ${slug}: not in VT`) - noResults++ - continue + console.log(`[vt:backfillActive] ${slug}: not in VT`); + noResults++; + continue; } const aiResult = vtResult.data.attributes.crowdsourced_ai_results?.find( - (r) => r.category === 'code_insight', - ) + (r) => r.category === "code_insight", + ); if (!aiResult) { - console.log(`[vt:backfillActive] 
${slug}: no Code Insight yet`) - noResults++ - continue + console.log(`[vt:backfillActive] ${slug}: no Code Insight yet`); + noResults++; + continue; } // Update the version with VT analysis - const verdict = normalizeVerdict(aiResult.verdict) - const status = verdictToStatus(verdict) + const verdict = normalizeVerdict(aiResult.verdict); + const status = verdictToStatus(verdict); await ctx.runMutation(internal.skills.updateVersionScanResultsInternal, { versionId, @@ -1092,35 +1100,35 @@ export const backfillActiveSkillsVTCache = internalAction({ source: aiResult.source, checkedAt: Date.now(), }, - }) + }); - console.log(`[vt:backfillActive] ${slug}: updated with ${status}`) - updated++ + console.log(`[vt:backfillActive] ${slug}: updated with ${status}`); + updated++; } catch (error) { - console.error(`[vt:backfillActive] Error for ${slug}:`, error) - errors++ + console.error(`[vt:backfillActive] Error for ${slug}:`, error); + errors++; } } - const done = skills.length < batchSize + const done = skills.length < batchSize; const result: BackfillActiveSkillsVTCacheResult = { total: skills.length, updated, noResults, errors, done, - } - console.log('[vt:backfillActive] Complete:', result) + }; + console.log("[vt:backfillActive] Complete:", result); // Self-schedule next batch if there are more skills to process if (!done) { - console.log('[vt:backfillActive] Scheduling next batch...') - await ctx.scheduler.runAfter(0, internal.vt.backfillActiveSkillsVTCache, { batchSize }) + console.log("[vt:backfillActive] Scheduling next batch..."); + await ctx.scheduler.runAfter(0, internal.vt.backfillActiveSkillsVTCache, { batchSize }); } - return result + return result; }, -}) +}); /** * Request VT reanalysis for skills stuck at scanner.vt.pending. 
@@ -1129,42 +1137,42 @@ export const backfillActiveSkillsVTCache = internalAction({ export const requestReanalysisForPending = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const apiKey = process.env.VT_API_KEY + const apiKey = process.env.VT_API_KEY; if (!apiKey) { - console.log('[vt:requestReanalysis] VT_API_KEY not configured') - return { error: 'VT_API_KEY not configured' } + console.log("[vt:requestReanalysis] VT_API_KEY not configured"); + return { error: "VT_API_KEY not configured" }; } - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 100; // Get skills with scanner.vt.pending moderationReason const skills: PendingVTSkill[] = await ctx.runQuery( internal.skills.getPendingVTSkillsInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:requestReanalysis] No pending skills found') - return { total: 0, requested: 0, done: true } + console.log("[vt:requestReanalysis] No pending skills found"); + return { total: 0, requested: 0, done: true }; } - console.log(`[vt:requestReanalysis] Found ${skills.length} skills to request reanalysis`) + console.log(`[vt:requestReanalysis] Found ${skills.length} skills to request reanalysis`); - let requested = 0 - let errors = 0 + let requested = 0; + let errors = 0; for (const { slug, sha256hash } of skills) { try { - const success = await requestRescan(apiKey, sha256hash) + const success = await requestRescan(apiKey, sha256hash); if (success) { - console.log(`[vt:requestReanalysis] ${slug}: rescan requested`) - requested++ + console.log(`[vt:requestReanalysis] ${slug}: rescan requested`); + requested++; } else { - errors++ + errors++; } } catch (error) { - console.error(`[vt:requestReanalysis] ${slug}: error`, error) - errors++ + console.error(`[vt:requestReanalysis] ${slug}: error`, error); + errors++; } } @@ -1173,11 +1181,11 @@ export const requestReanalysisForPending = internalAction({ requested, errors, 
done: skills.length < batchSize, - } - console.log('[vt:requestReanalysis] Complete:', result) - return result + }; + console.log("[vt:requestReanalysis] Complete:", result); + return result; }, -}) +}); /** * Fix skills with null moderationStatus by setting them to 'active'. @@ -1185,28 +1193,28 @@ export const requestReanalysisForPending = internalAction({ export const fixNullModerationStatus = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 100; const skills: NullModerationStatusSkill[] = await ctx.runQuery( internal.skills.getSkillsWithNullModerationStatusInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:fixNullStatus] No skills with null status found') - return { total: 0, fixed: 0, done: true } + console.log("[vt:fixNullStatus] No skills with null status found"); + return { total: 0, fixed: 0, done: true }; } - console.log(`[vt:fixNullStatus] Found ${skills.length} skills with null moderationStatus`) + console.log(`[vt:fixNullStatus] Found ${skills.length} skills with null moderationStatus`); for (const { skillId, slug: _slug } of skills) { - await ctx.runMutation(internal.skills.setSkillModerationStatusActiveInternal, { skillId }) + await ctx.runMutation(internal.skills.setSkillModerationStatusActiveInternal, { skillId }); } - console.log(`[vt:fixNullStatus] Fixed ${skills.length} skills`) - return { total: skills.length, fixed: skills.length, done: skills.length < batchSize } + console.log(`[vt:fixNullStatus] Fixed ${skills.length} skills`); + return { total: skills.length, fixed: skills.length, done: skills.length < batchSize }; }, -}) +}); /** * Sync moderationReason for skills that have vtAnalysis cached but stale moderationReason. 
@@ -1215,39 +1223,39 @@ export const fixNullModerationStatus = internalAction({ export const syncModerationReasons = internalAction({ args: { batchSize: v.optional(v.number()) }, handler: async (ctx, args): Promise => { - const batchSize = args.batchSize ?? 100 + const batchSize = args.batchSize ?? 100; const skills: StaleModerationReasonSkill[] = await ctx.runQuery( internal.skills.getSkillsWithStaleModerationReasonInternal, { limit: batchSize }, - ) + ); if (skills.length === 0) { - console.log('[vt:syncModeration] No stale skills found') - return { total: 0, synced: 0, noVtAnalysis: 0, done: true } + console.log("[vt:syncModeration] No stale skills found"); + return { total: 0, synced: 0, noVtAnalysis: 0, done: true }; } - console.log(`[vt:syncModeration] Found ${skills.length} skills with stale moderationReason`) + console.log(`[vt:syncModeration] Found ${skills.length} skills with stale moderationReason`); - let synced = 0 - let noVtAnalysis = 0 + let synced = 0; + let noVtAnalysis = 0; for (const { skillId, versionId: _versionId, slug, currentReason, vtStatus } of skills) { if (!vtStatus) { - noVtAnalysis++ - continue + noVtAnalysis++; + continue; } // Map vtAnalysis.status to moderationReason - const newReason = `scanner.vt.${vtStatus}` as const + const newReason = `scanner.vt.${vtStatus}` as const; await ctx.runMutation(internal.skills.updateSkillModerationReasonInternal, { skillId, moderationReason: newReason, - }) + }); - console.log(`[vt:syncModeration] ${slug}: ${currentReason} -> ${newReason}`) - synced++ + console.log(`[vt:syncModeration] ${slug}: ${currentReason} -> ${newReason}`); + synced++; } const result: SyncModerationReasonsResult = { @@ -1255,8 +1263,92 @@ export const syncModerationReasons = internalAction({ synced, noVtAnalysis, done: skills.length < batchSize, + }; + console.log("[vt:syncModeration] Complete:", result); + return result; + }, +}); + +/** + * Backfill normalized moderation fields for all published skills (excluding removed). 
+ * Safe to rerun: each batch recomputes from version static/vt/llm signals. + */ +export const backfillModerationV2 = internalAction({ + args: { + cursor: v.optional(v.number()), + batchSize: v.optional(v.number()), + accTotal: v.optional(v.number()), + accUpdated: v.optional(v.number()), + accErrors: v.optional(v.number()), + startTime: v.optional(v.number()), + }, + handler: async (ctx, args): Promise => { + const batchSize = args.batchSize ?? 100; + const cursor = args.cursor ?? 0; + const startTime = args.startTime ?? Date.now(); + let accTotal = args.accTotal ?? 0; + let accUpdated = args.accUpdated ?? 0; + let accErrors = args.accErrors ?? 0; + + const batch = await ctx.runQuery(internal.skills.getSkillBatchForModerationBackfillInternal, { + cursor, + batchSize, + }); + + if (batch.skills.length === 0 && batch.done) { + const durationMs = Date.now() - startTime; + await ctx.runMutation(internal.vt.logScanResultInternal, { + type: "backfill", + total: accTotal, + updated: accUpdated, + unchanged: 0, + errors: accErrors, + durationMs, + }); + return { + total: accTotal, + updated: accUpdated, + errors: accErrors, + durationMs, + done: true, + }; } - console.log('[vt:syncModeration] Complete:', result) - return result + + for (const entry of batch.skills) { + try { + const result = await ctx.runMutation(internal.skills.applyModerationBackfillInternal, { + skillId: entry.skillId, + versionId: entry.versionId, + }); + if (result?.ok) accUpdated += 1; + } catch (error) { + console.error("[vt:backfillModerationV2] Failed for", entry.slug, error); + accErrors += 1; + } + } + accTotal += batch.skills.length; + + if (!batch.done) { + await ctx.scheduler.runAfter(0, internal.vt.backfillModerationV2, { + cursor: batch.nextCursor, + batchSize, + accTotal, + accUpdated, + accErrors, + startTime, + }); + return { total: accTotal, updated: accUpdated, errors: accErrors, done: false }; + } + + const durationMs = Date.now() - startTime; + await 
ctx.runMutation(internal.vt.logScanResultInternal, { + type: "backfill", + total: accTotal, + updated: accUpdated, + unchanged: 0, + errors: accErrors, + durationMs, + }); + return { total: accTotal, updated: accUpdated, errors: accErrors, durationMs, done: true }; }, -}) +}); diff --git a/docs/security.md b/docs/security.md index a8c1054d0..fa5d9da5f 100644 --- a/docs/security.md +++ b/docs/security.md @@ -1,5 +1,5 @@ --- -summary: 'Security + moderation controls (reports, bans, upload gating).' +summary: "Security + moderation controls (reports, bans, upload gating)." read_when: - Working on moderation or abuse controls - Reviewing upload restrictions @@ -74,3 +74,27 @@ read_when: skills. - Word counting is language-aware (`Intl.Segmenter` with fallback), reducing false positives for non-space-separated languages. + +## Moderation v2 (reason codes + evidence) + +- Skills now carry normalized moderation fields: + - `moderationVerdict`: `clean | suspicious | malicious` + - `moderationReasonCodes`: stable reason-code list + - `moderationEvidence`: capped finding snippets (`code`, `severity`, `file`, `line`, `message`, `evidence`) + - `moderationEngineVersion`, `moderationEvaluatedAt`, `moderationSourceVersionId` +- Legacy fields (`moderationReason`, `moderationFlags`) remain for compatibility and are kept in sync. +- Public API responses still include `isSuspicious` and `isMalwareBlocked`, plus additive fields (`verdict`, `reasonCodes`, `summary`, `engineVersion`, `updatedAt`). +- Detailed moderation endpoint: + - `GET /api/v1/skills/:slug/moderation` + - owner/staff receive full evidence + - public callers receive sanitized evidence for flagged skills only + +Policy: + +- `malicious`: blocked from install/download. +- `suspicious`: visible with warnings; CLI install/update requires explicit confirm (or `--force` in non-interactive mode). +- `pending`: publish-time quarantine behavior unchanged. 
+ +Backfill: + +- `vt.backfillModerationV2` recomputes normalized moderation fields for historical published skills in bounded batches. diff --git a/docs/spec.md b/docs/spec.md index 1d050aca0..84c31de2d 100644 --- a/docs/spec.md +++ b/docs/spec.md @@ -9,6 +9,7 @@ read_when: # ClawHub — product + implementation spec (v1) ## Goals + - onlycrabs.ai mode for sharing `SOUL.md` bundles (host-based entry point). - Minimal, fast SPA for browsing and publishing agent skills. - Skills stored in Convex (files + metadata + versions + stats). @@ -19,12 +20,14 @@ read_when: - Moderation: badges + comment delete; audit everything. ## Non-goals (v1) + - Paid features, private skills, or binary assets. - GitHub App sync beyond backups (future phase). ## Core objects ### User + - `authId` (from Convex Auth provider) - `handle` (GitHub login) - `name`, `bio` @@ -33,6 +36,7 @@ read_when: - `createdAt`, `updatedAt` ### Skill + - `slug` (unique) - `displayName` - `ownerUserId` @@ -46,11 +50,16 @@ read_when: - `moderationStatus`: `active | hidden | removed` - `moderationFlags`: `string[]` (automatic detection) - `moderationNotes`, `moderationReason` +- `moderationVerdict`: `clean | suspicious | malicious` (normalized decision) +- `moderationReasonCodes`: `string[]` (stable machine-readable reason IDs) +- `moderationEvidence`: finding snippets (`code`, `severity`, `file`, `line`, `message`, `evidence`) +- `moderationSummary`, `moderationEngineVersion`, `moderationEvaluatedAt`, `moderationSourceVersionId` - `hiddenAt`, `hiddenBy`, `lastReviewedAt`, `reportCount` - `stats`: `{ downloads, stars, versions, comments }` - `createdAt`, `updatedAt` ### SkillVersion + - `skillId` - `version` (semver string) - `tag` (string, optional; `latest` always maintained separately) @@ -58,12 +67,15 @@ read_when: - `files`: list of file metadata - `path`, `size`, `storageId`, `sha256` - `parsed` (metadata extracted from SKILL.md) +- `staticScan`: deterministic static-analysis payload (`status`, 
`reasonCodes`, `findings`, `summary`, `engineVersion`, `checkedAt`) - `vectorDocId` (if using RAG component) OR `embeddingId` - `createdBy`, `createdAt` - `softDeletedAt` (nullable) ### Parsed Skill Metadata + From SKILL.md frontmatter + AgentSkills + Clawdis extensions: + - `name`, `description`, `homepage`, `website`, `url`, `emoji` - `metadata.clawdis`: `always`, `skillKey`, `primaryEnv`, `emoji`, `homepage`, `os`, `requires` (`bins`, `anyBins`, `env`, `config`), `install[]`, `nix` (`plugin`, `systems`), @@ -72,9 +84,8 @@ From SKILL.md frontmatter + AgentSkills + Clawdis extensions: - Nix plugins are different from regular skills; they bundle the skill pack, the CLI binary, and config flags/requirements together. - `metadata` in frontmatter is YAML (object) preferred; legacy JSON-string accepted. - - ### Soul + - `slug` (unique) - `displayName` - `ownerUserId` @@ -86,6 +97,7 @@ From SKILL.md frontmatter + AgentSkills + Clawdis extensions: - `createdAt`, `updatedAt` ### SoulVersion + - `soulId` - `version` (semver string) - `tag` (string, optional; `latest` always maintained separately) @@ -98,22 +110,27 @@ From SKILL.md frontmatter + AgentSkills + Clawdis extensions: - `softDeletedAt` (nullable) ### SoulComment + - `soulId`, `userId`, `body` - `softDeletedAt`, `deletedBy` - `createdAt` ### SoulStar + - `soulId`, `userId`, `createdAt` ### Comment + - `skillId`, `userId`, `body` - `softDeletedAt`, `deletedBy` - `createdAt` ### Star + - `skillId`, `userId`, `createdAt` ### AuditLog + - `actorUserId` - `action` (enum: `badge.set`, `badge.unset`, `comment.delete`, `role.change`) - `targetType` / `targetId` @@ -121,6 +138,7 @@ From SKILL.md frontmatter + AgentSkills + Clawdis extensions: - `createdAt` ## Auth + roles + - Convex Auth with GitHub OAuth App. - Default role `user`; bootstrap `steipete` to `admin` on first login. 
- Management console: moderators can hide/restore skills + mark duplicates + ban users; admins can change owners, approve badges, hard-delete skills, and ban users (deletes owned skills). @@ -128,37 +146,42 @@ From SKILL.md frontmatter + AgentSkills + Clawdis extensions: - Reporting: any user can report skills; per-user cap 20 active reports; skills auto-hide after >3 unique reports (mods can review/unhide/delete/ban). ## Upload flow (50MB per version) -1) Client requests upload session. -2) Client uploads each file via Convex upload URLs (no binaries, text only). -3) Client submits metadata + file list + changelog + version + tags. -4) Server validates: + +1. Client requests upload session. +2. Client uploads each file via Convex upload URLs (no binaries, text only). +3. Client submits metadata + file list + changelog + version + tags. +4. Server validates: - total size ≤ 50MB - file extensions/text content - SKILL.md exists and frontmatter parseable - version uniqueness - GitHub account age ≥ 7 days -5) Server stores files + metadata, sets `latest` tag, updates stats. +5. Server stores files + metadata, sets `latest` tag, updates stats. Soul upload flow: same as skills (including GitHub account age checks), but only `SOUL.md` is allowed. Seed data lives in `convex/seed.ts` for local dev. ## Versioning + tags + - Each upload is a new `SkillVersion`. - `latest` tag always points to most recent version unless user re-tags. - Rollback: move `latest` (and optionally other tags) to an older version. - Changelog is optional. ## Search + - Vector search over: SKILL.md + other text files + metadata summary (souls index SOUL.md). - Convex embeddings + vector index. - Filters: tag, owner, `redactionApproved` only, min stars, updatedAt. ## Download API + - JSON API for skill metadata + versions. - Download endpoint returns zip of a version (HTTP action). - Soft-delete versions; downloads remain for non-deleted versions only. 
## UI (SPA) + - Home: search + filters + trending/featured + “Highlighted” badge. - Skill detail: README render, files list, version history, tags, stats, badges. - Upload/edit: file picker + version + tag + changelog. @@ -166,14 +189,17 @@ Seed data lives in `convex/seed.ts` for local dev. - Admin: user role management + badge approvals + audit log. ## Testing + quality + - Vitest 4 with >=70% global coverage. - Lint: Biome + Oxlint (type-aware). ## Vercel + - Env vars: Convex deployment URLs + GitHub OAuth client + OpenAI key (if used) + GitHub App backup credentials. - SPA feel: client-side transitions, prefetching, optimistic UI. ## Open questions (carry forward) + - Embeddings provider key + rate limits. - Zip generation memory limits (optimize with streaming if needed). - GitHub App repo sync (phase 2). diff --git a/packages/clawdhub/src/cli.ts b/packages/clawdhub/src/cli.ts index 6f5c45327..c49cde5e7 100644 --- a/packages/clawdhub/src/cli.ts +++ b/packages/clawdhub/src/cli.ts @@ -1,19 +1,20 @@ #!/usr/bin/env node -import { stat } from 'node:fs/promises' -import { join, resolve } from 'node:path' -import { Command } from 'commander' -import { getCliBuildLabel, getCliVersion } from './cli/buildInfo.js' -import { resolveClawdbotDefaultWorkspace } from './cli/clawdbotConfig.js' -import { cmdLoginFlow, cmdLogout, cmdWhoami } from './cli/commands/auth.js' +import { Command } from "commander"; +import { stat } from "node:fs/promises"; +import { join, resolve } from "node:path"; +import type { GlobalOpts } from "./cli/types.js"; +import { getCliBuildLabel, getCliVersion } from "./cli/buildInfo.js"; +import { resolveClawdbotDefaultWorkspace } from "./cli/clawdbotConfig.js"; +import { cmdLoginFlow, cmdLogout, cmdWhoami } from "./cli/commands/auth.js"; import { cmdDeleteSkill, cmdHideSkill, cmdUndeleteSkill, cmdUnhideSkill, -} from './cli/commands/delete.js' -import { cmdInspect } from './cli/commands/inspect.js' -import { cmdBanUser, cmdSetRole } from 
'./cli/commands/moderation.js' -import { cmdPublish } from './cli/commands/publish.js' +} from "./cli/commands/delete.js"; +import { cmdInspect } from "./cli/commands/inspect.js"; +import { cmdBanUser, cmdSetRole } from "./cli/commands/moderation.js"; +import { cmdPublish } from "./cli/commands/publish.js"; import { cmdExplore, cmdInstall, @@ -21,364 +22,364 @@ import { cmdSearch, cmdUninstall, cmdUpdate, -} from './cli/commands/skills.js' -import { cmdStarSkill } from './cli/commands/star.js' -import { cmdSync } from './cli/commands/sync.js' -import { cmdUnstarSkill } from './cli/commands/unstar.js' -import { configureCommanderHelp, styleEnvBlock, styleTitle } from './cli/helpStyle.js' -import { DEFAULT_REGISTRY, DEFAULT_SITE } from './cli/registry.js' -import type { GlobalOpts } from './cli/types.js' -import { fail } from './cli/ui.js' -import { readGlobalConfig } from './config.js' +} from "./cli/commands/skills.js"; +import { cmdStarSkill } from "./cli/commands/star.js"; +import { cmdSync } from "./cli/commands/sync.js"; +import { cmdUnstarSkill } from "./cli/commands/unstar.js"; +import { configureCommanderHelp, styleEnvBlock, styleTitle } from "./cli/helpStyle.js"; +import { DEFAULT_REGISTRY, DEFAULT_SITE } from "./cli/registry.js"; +import { fail } from "./cli/ui.js"; +import { readGlobalConfig } from "./config.js"; const program = new Command() - .name('clawhub') + .name("clawhub") .description( `${styleTitle(`ClawHub CLI ${getCliBuildLabel()}`)}\n${styleEnvBlock( - 'install, update, search, and publish agent skills.', + "install, update, search, and publish agent skills.", )}`, ) - .version(getCliVersion(), '-V, --cli-version', 'Show CLI version') - .option('--workdir ', 'Working directory (default: cwd)') - .option('--dir ', 'Skills directory (relative to workdir, default: skills)') - .option('--site ', 'Site base URL (for browser login)') - .option('--registry ', 'Registry API base URL') - .option('--no-input', 'Disable prompts') + 
.version(getCliVersion(), "-V, --cli-version", "Show CLI version") + .option("--workdir ", "Working directory (default: cwd)") + .option("--dir ", "Skills directory (relative to workdir, default: skills)") + .option("--site ", "Site base URL (for browser login)") + .option("--registry ", "Registry API base URL") + .option("--no-input", "Disable prompts") .showHelpAfterError() .showSuggestionAfterError() .addHelpText( - 'after', + "after", styleEnvBlock( - '\nEnv:\n CLAWHUB_SITE\n CLAWHUB_REGISTRY\n CLAWHUB_WORKDIR\n (CLAWDHUB_* supported)\n', + "\nEnv:\n CLAWHUB_SITE\n CLAWHUB_REGISTRY\n CLAWHUB_WORKDIR\n (CLAWDHUB_* supported)\n", ), - ) + ); -configureCommanderHelp(program) +configureCommanderHelp(program); async function resolveGlobalOpts(): Promise { - const raw = program.opts<{ workdir?: string; dir?: string; site?: string; registry?: string }>() - const workdir = await resolveWorkdir(raw.workdir) - const dir = resolve(workdir, raw.dir ?? 'skills') - const site = raw.site ?? process.env.CLAWHUB_SITE ?? process.env.CLAWDHUB_SITE ?? DEFAULT_SITE + const raw = program.opts<{ workdir?: string; dir?: string; site?: string; registry?: string }>(); + const workdir = await resolveWorkdir(raw.workdir); + const dir = resolve(workdir, raw.dir ?? "skills"); + const site = raw.site ?? process.env.CLAWHUB_SITE ?? process.env.CLAWDHUB_SITE ?? DEFAULT_SITE; const registrySource = raw.registry - ? 'cli' + ? "cli" : process.env.CLAWHUB_REGISTRY || process.env.CLAWDHUB_REGISTRY - ? 'env' - : 'default' + ? "env" + : "default"; const registry = raw.registry ?? process.env.CLAWHUB_REGISTRY ?? process.env.CLAWDHUB_REGISTRY ?? 
- DEFAULT_REGISTRY - return { workdir, dir, site, registry, registrySource } + DEFAULT_REGISTRY; + return { workdir, dir, site, registry, registrySource }; } function isInputAllowed() { - const globalFlags = program.opts<{ input?: boolean }>() - return globalFlags.input !== false + const globalFlags = program.opts<{ input?: boolean }>(); + return globalFlags.input !== false; } async function resolveWorkdir(explicit?: string) { - if (explicit?.trim()) return resolve(explicit.trim()) - const envWorkdir = process.env.CLAWHUB_WORKDIR?.trim() ?? process.env.CLAWDHUB_WORKDIR?.trim() - if (envWorkdir) return resolve(envWorkdir) + if (explicit?.trim()) return resolve(explicit.trim()); + const envWorkdir = process.env.CLAWHUB_WORKDIR?.trim() ?? process.env.CLAWDHUB_WORKDIR?.trim(); + if (envWorkdir) return resolve(envWorkdir); - const cwd = resolve(process.cwd()) - const hasMarker = await hasClawhubMarker(cwd) - if (hasMarker) return cwd + const cwd = resolve(process.cwd()); + const hasMarker = await hasClawhubMarker(cwd); + if (hasMarker) return cwd; - const clawdbotWorkspace = await resolveClawdbotDefaultWorkspace() - return clawdbotWorkspace ? resolve(clawdbotWorkspace) : cwd + const clawdbotWorkspace = await resolveClawdbotDefaultWorkspace(); + return clawdbotWorkspace ? 
resolve(clawdbotWorkspace) : cwd; } async function hasClawhubMarker(workdir: string) { - const lockfile = join(workdir, '.clawhub', 'lock.json') - if (await pathExists(lockfile)) return true - const markerDir = join(workdir, '.clawhub') - if (await pathExists(markerDir)) return true - const legacyLockfile = join(workdir, '.clawdhub', 'lock.json') - if (await pathExists(legacyLockfile)) return true - const legacyMarkerDir = join(workdir, '.clawdhub') - return pathExists(legacyMarkerDir) + const lockfile = join(workdir, ".clawhub", "lock.json"); + if (await pathExists(lockfile)) return true; + const markerDir = join(workdir, ".clawhub"); + if (await pathExists(markerDir)) return true; + const legacyLockfile = join(workdir, ".clawdhub", "lock.json"); + if (await pathExists(legacyLockfile)) return true; + const legacyMarkerDir = join(workdir, ".clawdhub"); + return pathExists(legacyMarkerDir); } async function pathExists(path: string) { try { - await stat(path) - return true + await stat(path); + return true; } catch { - return false + return false; } } program - .command('login') - .description('Log in (opens browser or stores token)') - .option('--token ', 'API token') - .option('--label