diff --git a/app/api/accounts/[id]/route.ts b/app/api/accounts/[id]/route.ts index 3ed40db1..42b388bf 100644 --- a/app/api/accounts/[id]/route.ts +++ b/app/api/accounts/[id]/route.ts @@ -23,6 +23,7 @@ export async function OPTIONS() { * - id (required): The unique identifier of the account (UUID) * * @param request - The request object + * @param params.params * @param params - Route params containing the account ID * @returns A NextResponse with account data */ diff --git a/app/api/admins/coding/slack/route.ts b/app/api/admins/coding/slack/route.ts index ea880d30..956d7b4e 100644 --- a/app/api/admins/coding/slack/route.ts +++ b/app/api/admins/coding/slack/route.ts @@ -9,6 +9,8 @@ import { getSlackTagsHandler } from "@/lib/admins/slack/getSlackTagsHandler"; * Pulls directly from the Slack API as the source of truth. * Supports period filtering: all (default), daily, weekly, monthly. * Requires admin authentication. + * + * @param request */ export async function GET(request: NextRequest): Promise { return getSlackTagsHandler(request); diff --git a/app/api/admins/privy/route.ts b/app/api/admins/privy/route.ts index 073bac60..d22ec616 100644 --- a/app/api/admins/privy/route.ts +++ b/app/api/admins/privy/route.ts @@ -8,11 +8,16 @@ import { getPrivyLoginsHandler } from "@/lib/admins/privy/getPrivyLoginsHandler" * Returns Privy login statistics for the requested time period. * Supports daily (last 24h), weekly (last 7 days), and monthly (last 30 days) periods. * Requires admin authentication. 
+ * + * @param request */ export async function GET(request: NextRequest): Promise { return getPrivyLoginsHandler(request); } +/** + * + */ export async function OPTIONS(): Promise { return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); } diff --git a/app/api/content/analyze/route.ts b/app/api/content/analyze/route.ts new file mode 100644 index 00000000..1e7e4594 --- /dev/null +++ b/app/api/content/analyze/route.ts @@ -0,0 +1,13 @@ +import { createAnalyzeHandler } from "@/lib/content/primitives/createAnalyzeHandler"; +import { createPrimitiveRoute } from "@/lib/content/primitives/createPrimitiveRoute"; + +/** + * POST /api/content/analyze + * + * Analyze a video and generate text based on its content. + */ +export const { OPTIONS, POST } = createPrimitiveRoute(createAnalyzeHandler); + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/caption/route.ts b/app/api/content/caption/route.ts new file mode 100644 index 00000000..1160b744 --- /dev/null +++ b/app/api/content/caption/route.ts @@ -0,0 +1,13 @@ +import { createTextHandler } from "@/lib/content/primitives/createTextHandler"; +import { createPrimitiveRoute } from "@/lib/content/primitives/createPrimitiveRoute"; + +/** + * POST /api/content/caption + * + * Generate on-screen caption text for a social video. 
+ */ +export const { OPTIONS, POST } = createPrimitiveRoute(createTextHandler); + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/image/route.ts b/app/api/content/image/route.ts new file mode 100644 index 00000000..511ac1c5 --- /dev/null +++ b/app/api/content/image/route.ts @@ -0,0 +1,13 @@ +import { createImageHandler } from "@/lib/content/primitives/createImageHandler"; +import { createPrimitiveRoute } from "@/lib/content/primitives/createPrimitiveRoute"; + +/** + * POST /api/content/image + * + * Generate an image from a prompt and optional reference image. + */ +export const { OPTIONS, POST } = createPrimitiveRoute(createImageHandler); + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/route.ts b/app/api/content/route.ts new file mode 100644 index 00000000..50737e0e --- /dev/null +++ b/app/api/content/route.ts @@ -0,0 +1,15 @@ +import { editHandler } from "@/lib/content/primitives/editHandler"; +import { primitiveOptionsHandler } from "@/lib/content/primitives/createPrimitiveRoute"; + +export { primitiveOptionsHandler as OPTIONS }; + +/** + * PATCH /api/content + * + * Edit media with operations or a template preset. + */ +export { editHandler as PATCH }; + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/templates/[id]/route.ts b/app/api/content/templates/[id]/route.ts new file mode 100644 index 00000000..a7eee83f --- /dev/null +++ b/app/api/content/templates/[id]/route.ts @@ -0,0 +1,26 @@ +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { getContentTemplateDetailHandler } from "@/lib/content/getContentTemplateDetailHandler"; + +/** + * OPTIONS handler for CORS preflight requests. 
+ * + * @returns Empty 204 response with CORS headers. + */ +export async function OPTIONS() { + return new NextResponse(null, { + status: 204, + headers: getCorsHeaders(), + }); +} + +/** + * GET /api/content/templates/[id] + * + * Returns the full template configuration for a given template id. + */ +export { getContentTemplateDetailHandler as GET }; + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/transcribe/route.ts b/app/api/content/transcribe/route.ts new file mode 100644 index 00000000..0b4e63d7 --- /dev/null +++ b/app/api/content/transcribe/route.ts @@ -0,0 +1,13 @@ +import { createAudioHandler } from "@/lib/content/primitives/createAudioHandler"; +import { createPrimitiveRoute } from "@/lib/content/primitives/createPrimitiveRoute"; + +/** + * POST /api/content/transcribe + * + * Transcribe a song into timestamped lyrics. + */ +export const { OPTIONS, POST } = createPrimitiveRoute(createAudioHandler); + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/upscale/route.ts b/app/api/content/upscale/route.ts new file mode 100644 index 00000000..b7218999 --- /dev/null +++ b/app/api/content/upscale/route.ts @@ -0,0 +1,13 @@ +import { createUpscaleHandler } from "@/lib/content/primitives/createUpscaleHandler"; +import { createPrimitiveRoute } from "@/lib/content/primitives/createPrimitiveRoute"; + +/** + * POST /api/content/upscale + * + * Upscale an image or video to higher resolution. 
+ */ +export const { OPTIONS, POST } = createPrimitiveRoute(createUpscaleHandler); + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/content/video/route.ts b/app/api/content/video/route.ts new file mode 100644 index 00000000..590c552d --- /dev/null +++ b/app/api/content/video/route.ts @@ -0,0 +1,15 @@ +import { createVideoHandler } from "@/lib/content/primitives/createVideoHandler"; +import { primitiveOptionsHandler } from "@/lib/content/primitives/createPrimitiveRoute"; + +export { primitiveOptionsHandler as OPTIONS }; + +/** + * POST /api/content/video + * + * Generate a video from a prompt, image, or existing video. + */ +export { createVideoHandler as POST }; + +export const dynamic = "force-dynamic"; +export const fetchCache = "force-no-store"; +export const revalidate = 0; diff --git a/app/api/songs/analyze/presets/route.ts b/app/api/songs/analyze/presets/route.ts index 8baccd38..b809394c 100644 --- a/app/api/songs/analyze/presets/route.ts +++ b/app/api/songs/analyze/presets/route.ts @@ -28,6 +28,7 @@ export async function OPTIONS() { * - status: "success" * - presets: Array of { name, label, description, requiresAudio, responseFormat } * + * @param request * @returns A NextResponse with the list of available presets */ export async function GET(request: NextRequest): Promise { diff --git a/app/api/transcribe/route.ts b/app/api/transcribe/route.ts index 28cf4261..0896806b 100644 --- a/app/api/transcribe/route.ts +++ b/app/api/transcribe/route.ts @@ -2,6 +2,10 @@ import { NextRequest, NextResponse } from "next/server"; import { processAudioTranscription } from "@/lib/transcribe/processAudioTranscription"; import { formatTranscriptionError } from "@/lib/transcribe/types"; +/** + * + * @param req + */ export async function POST(req: NextRequest) { try { const body = await req.json(); diff --git a/lib/admins/emails/__tests__/validateGetAdminEmailsQuery.test.ts 
b/lib/admins/emails/__tests__/validateGetAdminEmailsQuery.test.ts index 90e1a3d0..7531a477 100644 --- a/lib/admins/emails/__tests__/validateGetAdminEmailsQuery.test.ts +++ b/lib/admins/emails/__tests__/validateGetAdminEmailsQuery.test.ts @@ -12,6 +12,10 @@ vi.mock("@/lib/admins/validateAdminAuth", () => ({ validateAdminAuth: vi.fn(), })); +/** + * + * @param url + */ function createMockRequest(url: string): NextRequest { return { url, diff --git a/lib/admins/pr/__tests__/getPrMergedStatusHandler.test.ts b/lib/admins/pr/__tests__/getPrMergedStatusHandler.test.ts index e007e9c8..826b69d6 100644 --- a/lib/admins/pr/__tests__/getPrMergedStatusHandler.test.ts +++ b/lib/admins/pr/__tests__/getPrMergedStatusHandler.test.ts @@ -19,6 +19,10 @@ vi.mock("@/lib/github/fetchGithubPrStatus", () => ({ const PR_URL_1 = "https://github.com/recoupable/api/pull/42"; const PR_URL_2 = "https://github.com/recoupable/chat/pull/100"; +/** + * + * @param urls + */ function makeRequest(urls: string[] = [PR_URL_1]) { const params = new URLSearchParams(); urls.forEach(url => params.append("pull_requests", url)); diff --git a/lib/admins/pr/getPrStatusHandler.ts b/lib/admins/pr/getPrStatusHandler.ts index 27081718..73cefa94 100644 --- a/lib/admins/pr/getPrStatusHandler.ts +++ b/lib/admins/pr/getPrStatusHandler.ts @@ -10,6 +10,8 @@ import { fetchGithubPrStatus } from "@/lib/github/fetchGithubPrStatus"; * Uses the GitHub REST API to check each PR's state. * * Requires admin authentication. + * + * @param request */ export async function getPrStatusHandler(request: NextRequest): Promise { try { diff --git a/lib/admins/privy/countNewAccounts.ts b/lib/admins/privy/countNewAccounts.ts index 012ced53..1d34a14a 100644 --- a/lib/admins/privy/countNewAccounts.ts +++ b/lib/admins/privy/countNewAccounts.ts @@ -5,6 +5,9 @@ import { getCutoffMs } from "./getCutoffMs"; /** * Counts how many users in the list were created within the cutoff period. 
+ * + * @param users + * @param period */ export function countNewAccounts(users: User[], period: PrivyLoginsPeriod): number { const cutoffMs = getCutoffMs(period); diff --git a/lib/admins/privy/fetchPrivyLogins.ts b/lib/admins/privy/fetchPrivyLogins.ts index ae4d4dd0..35ac556c 100644 --- a/lib/admins/privy/fetchPrivyLogins.ts +++ b/lib/admins/privy/fetchPrivyLogins.ts @@ -20,6 +20,10 @@ export type FetchPrivyLoginsResult = { totalPrivyUsers: number; }; +/** + * + * @param period + */ export async function fetchPrivyLogins(period: PrivyLoginsPeriod): Promise { const isAll = period === "all"; const cutoffMs = getCutoffMs(period); diff --git a/lib/admins/privy/getCutoffMs.ts b/lib/admins/privy/getCutoffMs.ts index 8b80ec6a..4de0fa32 100644 --- a/lib/admins/privy/getCutoffMs.ts +++ b/lib/admins/privy/getCutoffMs.ts @@ -5,6 +5,8 @@ import { PERIOD_DAYS } from "./periodDays"; * Returns the cutoff timestamp in milliseconds for a given period. * Uses midnight UTC calendar day boundaries to match Privy dashboard behavior. * Returns 0 for "all" (no cutoff). + * + * @param period */ export function getCutoffMs(period: PrivyLoginsPeriod): number { if (period === "all") return 0; diff --git a/lib/admins/privy/getLatestVerifiedAt.ts b/lib/admins/privy/getLatestVerifiedAt.ts index 465ea876..c7f7ba9b 100644 --- a/lib/admins/privy/getLatestVerifiedAt.ts +++ b/lib/admins/privy/getLatestVerifiedAt.ts @@ -4,6 +4,8 @@ import type { User } from "@privy-io/node"; /** * Returns the most recent latest_verified_at (in ms) across all linked_accounts for a Privy user. * Returns null if no linked account has a latest_verified_at. + * + * @param user */ export function getLatestVerifiedAt(user: User): number | null { const linkedAccounts = user.linked_accounts; diff --git a/lib/admins/privy/toMs.ts b/lib/admins/privy/toMs.ts index 472ff9eb..2daad687 100644 --- a/lib/admins/privy/toMs.ts +++ b/lib/admins/privy/toMs.ts @@ -1,6 +1,8 @@ /** * Normalizes a Privy timestamp to milliseconds. 
* Privy docs say milliseconds but examples show seconds (10 digits). + * + * @param timestamp */ export function toMs(timestamp: number): number { return timestamp > 1e12 ? timestamp : timestamp * 1000; diff --git a/lib/agents/content/__tests__/handleContentAgentCallback.test.ts b/lib/agents/content/__tests__/handleContentAgentCallback.test.ts index 36fa4ea1..5edf71a3 100644 --- a/lib/agents/content/__tests__/handleContentAgentCallback.test.ts +++ b/lib/agents/content/__tests__/handleContentAgentCallback.test.ts @@ -84,6 +84,10 @@ describe("handleContentAgentCallback", () => { }); describe("completed callback with videos", () => { + /** + * + * @param body + */ function makeAuthRequest(body: object) { return new Request("http://localhost/api/content-agent/callback", { method: "POST", @@ -92,6 +96,9 @@ describe("handleContentAgentCallback", () => { }); } + /** + * + */ function mockThread() { const thread = { post: vi.fn().mockResolvedValue(undefined), diff --git a/lib/ai/getModel.ts b/lib/ai/getModel.ts index edf4d425..99ca9c2f 100644 --- a/lib/ai/getModel.ts +++ b/lib/ai/getModel.ts @@ -3,6 +3,7 @@ import { GatewayLanguageModelEntry } from "@ai-sdk/gateway"; /** * Returns a specific model by its ID from the list of available models. + * * @param modelId - The ID of the model to find * @returns The matching model or undefined if not found */ diff --git a/lib/ai/isEmbedModel.ts b/lib/ai/isEmbedModel.ts index 7c5fbbfb..4901f1e8 100644 --- a/lib/ai/isEmbedModel.ts +++ b/lib/ai/isEmbedModel.ts @@ -3,6 +3,8 @@ import { GatewayLanguageModelEntry } from "@ai-sdk/gateway"; /** * Determines if a model is an embedding model (not suitable for chat). * Embed models typically have 0 output pricing since they only produce embeddings. 
+ * + * @param m */ export const isEmbedModel = (m: GatewayLanguageModelEntry): boolean => { const pricing = m.pricing; diff --git a/lib/artists/__tests__/createArtistPostHandler.test.ts b/lib/artists/__tests__/createArtistPostHandler.test.ts index e63d244d..dd72b2e1 100644 --- a/lib/artists/__tests__/createArtistPostHandler.test.ts +++ b/lib/artists/__tests__/createArtistPostHandler.test.ts @@ -14,6 +14,11 @@ vi.mock("@/lib/auth/validateAuthContext", () => ({ validateAuthContext: (...args: unknown[]) => mockValidateAuthContext(...args), })); +/** + * + * @param body + * @param headers + */ function createRequest(body: unknown, headers: Record = {}): NextRequest { const defaultHeaders: Record = { "Content-Type": "application/json", diff --git a/lib/artists/__tests__/validateCreateArtistBody.test.ts b/lib/artists/__tests__/validateCreateArtistBody.test.ts index 4de5562b..d12fe1ba 100644 --- a/lib/artists/__tests__/validateCreateArtistBody.test.ts +++ b/lib/artists/__tests__/validateCreateArtistBody.test.ts @@ -9,6 +9,11 @@ vi.mock("@/lib/auth/validateAuthContext", () => ({ validateAuthContext: (...args: unknown[]) => mockValidateAuthContext(...args), })); +/** + * + * @param body + * @param headers + */ function createRequest(body: unknown, headers: Record = {}): NextRequest { const defaultHeaders: Record = { "Content-Type": "application/json" }; return new NextRequest("http://localhost/api/artists", { diff --git a/lib/auth/__tests__/validateAuthContext.test.ts b/lib/auth/__tests__/validateAuthContext.test.ts index 31dda345..c4769178 100644 --- a/lib/auth/__tests__/validateAuthContext.test.ts +++ b/lib/auth/__tests__/validateAuthContext.test.ts @@ -33,6 +33,10 @@ const mockGetAuthenticatedAccountId = vi.mocked(getAuthenticatedAccountId); const mockValidateOrganizationAccess = vi.mocked(validateOrganizationAccess); const mockCanAccessAccount = vi.mocked(canAccessAccount); +/** + * + * @param headers + */ function createMockRequest(headers: Record = {}): Request { 
return { headers: { diff --git a/lib/catalog/formatCatalogSongsAsCSV.ts b/lib/catalog/formatCatalogSongsAsCSV.ts index 5115eece..29cc443c 100644 --- a/lib/catalog/formatCatalogSongsAsCSV.ts +++ b/lib/catalog/formatCatalogSongsAsCSV.ts @@ -2,6 +2,8 @@ import { CatalogSong } from "./getCatalogSongs"; /** * Formats catalog songs into the CSV-like format expected by the scorer + * + * @param songs */ export function formatCatalogSongsAsCSV(songs: CatalogSong[]): string { const csvLines = songs.map(song => { diff --git a/lib/catalog/getCatalogDataAsCSV.ts b/lib/catalog/getCatalogDataAsCSV.ts index ea529c37..4a86fc0e 100644 --- a/lib/catalog/getCatalogDataAsCSV.ts +++ b/lib/catalog/getCatalogDataAsCSV.ts @@ -3,6 +3,8 @@ import { formatCatalogSongsAsCSV } from "./formatCatalogSongsAsCSV"; /** * Gets all catalog songs and formats them as CSV for the scorer + * + * @param catalogId */ export async function getCatalogDataAsCSV(catalogId: string): Promise { const allSongs: CatalogSong[] = []; diff --git a/lib/catalog/getCatalogSongs.ts b/lib/catalog/getCatalogSongs.ts index c58c33be..d7b5ca62 100644 --- a/lib/catalog/getCatalogSongs.ts +++ b/lib/catalog/getCatalogSongs.ts @@ -25,6 +25,13 @@ export interface CatalogSongsResponse { error?: string; } +/** + * + * @param catalogId + * @param pageSize + * @param page + * @param artistName + */ export async function getCatalogSongs( catalogId: string, pageSize: number = 100, diff --git a/lib/catalog/getCatalogs.ts b/lib/catalog/getCatalogs.ts index 9533183b..4ac8a842 100644 --- a/lib/catalog/getCatalogs.ts +++ b/lib/catalog/getCatalogs.ts @@ -8,6 +8,10 @@ export interface CatalogsResponse { error?: string; } +/** + * + * @param accountId + */ export async function getCatalogs(accountId: string): Promise { try { const response = await fetch( diff --git a/lib/chat/__tests__/integration/chatEndToEnd.test.ts b/lib/chat/__tests__/integration/chatEndToEnd.test.ts index 25841a5e..43fac73b 100644 --- 
a/lib/chat/__tests__/integration/chatEndToEnd.test.ts +++ b/lib/chat/__tests__/integration/chatEndToEnd.test.ts @@ -170,6 +170,11 @@ const mockDeductCredits = vi.mocked(deductCredits); const mockGenerateChatTitle = vi.mocked(generateChatTitle); // Helper to create mock NextRequest +/** + * + * @param body + * @param headers + */ function createMockRequest(body: unknown, headers: Record = {}): Request { return { json: () => Promise.resolve(body), diff --git a/lib/chat/toolChains/getPrepareStepResult.ts b/lib/chat/toolChains/getPrepareStepResult.ts index 02dd8e71..c011c078 100644 --- a/lib/chat/toolChains/getPrepareStepResult.ts +++ b/lib/chat/toolChains/getPrepareStepResult.ts @@ -12,6 +12,8 @@ type PrepareStepOptions = { /** * Returns the next tool to run based on timeline progression through tool chains. * Uses toolCallsContent to track exact execution order and position in sequence. + * + * @param options */ const getPrepareStepResult = (options: PrepareStepOptions): PrepareStepResult | undefined => { const { steps } = options; diff --git a/lib/chats/__tests__/createChatHandler.test.ts b/lib/chats/__tests__/createChatHandler.test.ts index 6d509147..3258d6cf 100644 --- a/lib/chats/__tests__/createChatHandler.test.ts +++ b/lib/chats/__tests__/createChatHandler.test.ts @@ -41,6 +41,10 @@ vi.mock("../generateChatTitle", () => ({ generateChatTitle: vi.fn(), })); +/** + * + * @param headers + */ function createMockRequest( headers: Record = { "x-api-key": "test-api-key" }, ): NextRequest { diff --git a/lib/chats/processCompactChatRequest.ts b/lib/chats/processCompactChatRequest.ts index a1699c93..c98c2e97 100644 --- a/lib/chats/processCompactChatRequest.ts +++ b/lib/chats/processCompactChatRequest.ts @@ -17,6 +17,9 @@ interface ProcessCompactChatRequestParams { * Verifies the chat exists and the user has access before compacting. * * @param params - The parameters for processing the chat compaction. 
+ * @param params.chatId + * @param params.prompt + * @param params.accountId * @returns The result of the compaction attempt. */ export async function processCompactChatRequest({ diff --git a/lib/coding-agent/__tests__/handleGitHubWebhook.test.ts b/lib/coding-agent/__tests__/handleGitHubWebhook.test.ts index 5e059f4e..194a7170 100644 --- a/lib/coding-agent/__tests__/handleGitHubWebhook.test.ts +++ b/lib/coding-agent/__tests__/handleGitHubWebhook.test.ts @@ -45,6 +45,12 @@ const BASE_PAYLOAD = { }, }; +/** + * + * @param body + * @param event + * @param signature + */ function makeRequest(body: unknown, event = "issue_comment", signature = "valid") { return { text: () => Promise.resolve(JSON.stringify(body)), diff --git a/lib/coding-agent/__tests__/onMergeTestToMainAction.test.ts b/lib/coding-agent/__tests__/onMergeTestToMainAction.test.ts index 8af470e1..f173d6ce 100644 --- a/lib/coding-agent/__tests__/onMergeTestToMainAction.test.ts +++ b/lib/coding-agent/__tests__/onMergeTestToMainAction.test.ts @@ -12,6 +12,9 @@ beforeEach(() => { process.env.GITHUB_TOKEN = "ghp_test"; }); +/** + * + */ function createMockBot() { return { onAction: vi.fn() } as any; } diff --git a/lib/coding-agent/encodeGitHubThreadId.ts b/lib/coding-agent/encodeGitHubThreadId.ts index 1cfff2fe..f4797e43 100644 --- a/lib/coding-agent/encodeGitHubThreadId.ts +++ b/lib/coding-agent/encodeGitHubThreadId.ts @@ -6,6 +6,8 @@ import type { GitHubThreadId } from "@chat-adapter/github"; * * - PR-level: `github:{owner}/{repo}:{prNumber}` * - Review comment: `github:{owner}/{repo}:{prNumber}:rc:{reviewCommentId}` + * + * @param thread */ export function encodeGitHubThreadId(thread: GitHubThreadId): string { const { owner, repo, prNumber, reviewCommentId } = thread; diff --git a/lib/coding-agent/handleMergeSuccess.ts b/lib/coding-agent/handleMergeSuccess.ts index f026f48d..c241923b 100644 --- a/lib/coding-agent/handleMergeSuccess.ts +++ b/lib/coding-agent/handleMergeSuccess.ts @@ -7,6 +7,8 @@ import type { 
CodingAgentThreadState } from "./types"; * Handles post-merge cleanup after all PRs merged successfully. * Deletes the shared PR state keys for all repos and persists the latest * snapshot via upsertAccountSnapshot. + * + * @param state */ export async function handleMergeSuccess(state: CodingAgentThreadState): Promise { try { diff --git a/lib/coding-agent/parseMergeActionId.ts b/lib/coding-agent/parseMergeActionId.ts index 5118249e..25fd3eeb 100644 --- a/lib/coding-agent/parseMergeActionId.ts +++ b/lib/coding-agent/parseMergeActionId.ts @@ -1,6 +1,8 @@ /** * Parses a merge action ID like "merge_pr:recoupable/api#42" * into { repo, number } or null if the format doesn't match. + * + * @param actionId */ export function parseMergeActionId(actionId: string) { const match = actionId.match(/^merge_pr:(.+)#(\d+)$/); diff --git a/lib/coding-agent/parseMergeTestToMainActionId.ts b/lib/coding-agent/parseMergeTestToMainActionId.ts index 1228615f..14133eac 100644 --- a/lib/coding-agent/parseMergeTestToMainActionId.ts +++ b/lib/coding-agent/parseMergeTestToMainActionId.ts @@ -1,6 +1,8 @@ /** * Parses a merge_test_to_main action ID like "merge_test_to_main:recoupable/api" * into the repo string, or null if the format doesn't match. 
+ * + * @param actionId */ export function parseMergeTestToMainActionId(actionId: string): string | null { const prefix = "merge_test_to_main:"; diff --git a/lib/composio/getCallbackUrl.ts b/lib/composio/getCallbackUrl.ts index 570c9251..8c83505a 100644 --- a/lib/composio/getCallbackUrl.ts +++ b/lib/composio/getCallbackUrl.ts @@ -19,6 +19,7 @@ interface CallbackOptions { * * @param options.destination - Where to redirect: "chat" or "connectors" * @param options.roomId - For chat destination, the room ID to return to + * @param options * @returns Full callback URL with success indicator */ export function getCallbackUrl(options: CallbackOptions): string { diff --git a/lib/content/__tests__/getContentTemplateDetailHandler.test.ts b/lib/content/__tests__/getContentTemplateDetailHandler.test.ts new file mode 100644 index 00000000..8820f81e --- /dev/null +++ b/lib/content/__tests__/getContentTemplateDetailHandler.test.ts @@ -0,0 +1,94 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { NextRequest, NextResponse } from "next/server"; +import { getContentTemplateDetailHandler } from "@/lib/content/getContentTemplateDetailHandler"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { loadTemplate } from "@/lib/content/templates"; + +vi.mock("@/lib/networking/getCorsHeaders", () => ({ + getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })), +})); + +vi.mock("@/lib/auth/validateAuthContext", () => ({ + validateAuthContext: vi.fn(), +})); + +vi.mock("@/lib/content/templates", () => ({ + loadTemplate: vi.fn(), +})); + +describe("getContentTemplateDetailHandler", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("returns 401 when not authenticated", async () => { + vi.mocked(validateAuthContext).mockResolvedValue( + NextResponse.json({ status: "error", error: "Unauthorized" }, { status: 401 }), + ); + const request = new NextRequest("http://localhost/api/content/templates/bedroom", { + method: 
"GET", + }); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "bedroom" }), + }); + + expect(result.status).toBe(401); + }); + + it("returns 404 for unknown template", async () => { + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "test-key", + }); + vi.mocked(loadTemplate).mockReturnValue(null); + + const request = new NextRequest("http://localhost/api/content/templates/nonexistent", { + method: "GET", + }); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "nonexistent" }), + }); + const body = await result.json(); + + expect(result.status).toBe(404); + expect(body.error).toBe("Template not found"); + }); + + it("returns full template for valid id", async () => { + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "test-key", + }); + const mockTemplate = { + id: "artist-caption-bedroom", + description: "Moody purple bedroom setting", + image: { prompt: "test", reference_images: [], style_rules: {} }, + video: { moods: ["calm"], movements: ["slow pan"] }, + caption: { guide: { tone: "dreamy", rules: [], formats: [] }, examples: [] }, + edit: { operations: [] }, + }; + vi.mocked(loadTemplate).mockReturnValue(mockTemplate); + + const request = new NextRequest( + "http://localhost/api/content/templates/artist-caption-bedroom", + { method: "GET" }, + ); + + const result = await getContentTemplateDetailHandler(request, { + params: Promise.resolve({ id: "artist-caption-bedroom" }), + }); + const body = await result.json(); + + expect(result.status).toBe(200); + expect(body.id).toBe("artist-caption-bedroom"); + expect(body.description).toBe("Moody purple bedroom setting"); + expect(body.image).toBeDefined(); + expect(body.video).toBeDefined(); + expect(body.caption).toBeDefined(); + expect(body.edit).toBeDefined(); + }); +}); diff --git 
a/lib/content/__tests__/validateCreateContentBody.test.ts b/lib/content/__tests__/validateCreateContentBody.test.ts index 1a71d5ae..658ef8d7 100644 --- a/lib/content/__tests__/validateCreateContentBody.test.ts +++ b/lib/content/__tests__/validateCreateContentBody.test.ts @@ -20,6 +20,10 @@ vi.mock("@/lib/content/resolveArtistSlug", () => ({ resolveArtistSlug: vi.fn().mockResolvedValue("gatsby-grace"), })); +/** + * + * @param body + */ function createRequest(body: unknown): NextRequest { return new NextRequest("http://localhost/api/content/create", { method: "POST", @@ -71,11 +75,25 @@ describe("validateCreateContentBody", () => { expect(result).not.toBeInstanceOf(NextResponse); if (!(result instanceof NextResponse)) { - expect(result.template).toBe("artist-caption-bedroom"); + expect(result.template).toBeUndefined(); expect(result.lipsync).toBe(false); } }); + it("accepts request without template", async () => { + const request = createRequest({ + artist_account_id: "550e8400-e29b-41d4-a716-446655440000", + }); + + const result = await validateCreateContentBody(request); + + expect(result).not.toBeInstanceOf(NextResponse); + if (!(result instanceof NextResponse)) { + expect(result.template).toBeUndefined(); + expect(result.artistAccountId).toBe("550e8400-e29b-41d4-a716-446655440000"); + } + }); + it("returns 400 when artist_account_id is missing", async () => { const request = createRequest({ template: "artist-caption-bedroom", diff --git a/lib/content/contentTemplates.ts b/lib/content/contentTemplates.ts index 179c7453..a9f71fa4 100644 --- a/lib/content/contentTemplates.ts +++ b/lib/content/contentTemplates.ts @@ -30,6 +30,10 @@ export const CONTENT_TEMPLATES: ContentTemplate[] = [ /** Derived from the first entry in CONTENT_TEMPLATES to avoid string duplication. 
*/ export const DEFAULT_CONTENT_TEMPLATE = CONTENT_TEMPLATES[0].name; +/** + * + * @param template + */ export function isSupportedContentTemplate(template: string): boolean { return CONTENT_TEMPLATES.some(item => item.name === template); } diff --git a/lib/content/getArtistContentReadiness.ts b/lib/content/getArtistContentReadiness.ts index a902ce0f..9238598e 100644 --- a/lib/content/getArtistContentReadiness.ts +++ b/lib/content/getArtistContentReadiness.ts @@ -22,6 +22,11 @@ export interface ArtistContentReadiness { /** * Checks whether an artist has the expected files for content creation. * Searches the main repo and org submodule repos. + * + * @param root0 + * @param root0.accountId + * @param root0.artistAccountId + * @param root0.artistSlug */ export async function getArtistContentReadiness({ accountId, diff --git a/lib/content/getArtistFileTree.ts b/lib/content/getArtistFileTree.ts index 908855a0..b5392b52 100644 --- a/lib/content/getArtistFileTree.ts +++ b/lib/content/getArtistFileTree.ts @@ -4,6 +4,9 @@ import { getOrgRepoUrls } from "@/lib/github/getOrgRepoUrls"; /** * Gets the file tree that contains the artist, checking the main repo * first, then falling back to org submodule repos. 
+ * + * @param githubRepo + * @param artistSlug */ export async function getArtistFileTree( githubRepo: string, diff --git a/lib/content/getArtistRootPrefix.ts b/lib/content/getArtistRootPrefix.ts index 5a777abe..bf81d48a 100644 --- a/lib/content/getArtistRootPrefix.ts +++ b/lib/content/getArtistRootPrefix.ts @@ -1,3 +1,8 @@ +/** + * + * @param paths + * @param artistSlug + */ export function getArtistRootPrefix(paths: string[], artistSlug: string): string { const preferredPrefix = `artists/${artistSlug}/`; if (paths.some(path => path.startsWith(preferredPrefix))) { diff --git a/lib/content/getContentTemplateDetailHandler.ts b/lib/content/getContentTemplateDetailHandler.ts new file mode 100644 index 00000000..6051b4c9 --- /dev/null +++ b/lib/content/getContentTemplateDetailHandler.ts @@ -0,0 +1,34 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { loadTemplate } from "@/lib/content/templates"; + +/** + * Handler for GET /api/content/templates/{id}. + * + * @param request - Incoming API request. + * @param params - Route params containing the template id. + * @returns The full template object, or 404 if not found. 
+ */ +export async function getContentTemplateDetailHandler( + request: NextRequest, + { params }: { params: Promise<{ id: string }> }, +): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) { + return authResult; + } + + const { id } = await params; + const template = loadTemplate(id); + + if (!template) { + return NextResponse.json( + { status: "error", error: "Template not found" }, + { status: 404, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json(template, { status: 200, headers: getCorsHeaders() }); +} diff --git a/lib/content/getContentTemplatesHandler.ts b/lib/content/getContentTemplatesHandler.ts index d1a65d80..2bf6552d 100644 --- a/lib/content/getContentTemplatesHandler.ts +++ b/lib/content/getContentTemplatesHandler.ts @@ -2,7 +2,7 @@ import type { NextRequest } from "next/server"; import { NextResponse } from "next/server"; import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; import { validateAuthContext } from "@/lib/auth/validateAuthContext"; -import { CONTENT_TEMPLATES } from "@/lib/content/contentTemplates"; +import { listTemplates } from "@/lib/content/templates"; /** * Handler for GET /api/content/templates. @@ -18,7 +18,7 @@ export async function getContentTemplatesHandler(request: NextRequest): Promise< return NextResponse.json( { status: "success", - templates: CONTENT_TEMPLATES, + templates: listTemplates(), }, { status: 200, headers: getCorsHeaders() }, ); diff --git a/lib/content/getContentValidateHandler.ts b/lib/content/getContentValidateHandler.ts index e0c758b8..81cd0ce8 100644 --- a/lib/content/getContentValidateHandler.ts +++ b/lib/content/getContentValidateHandler.ts @@ -8,6 +8,8 @@ import { getArtistContentReadiness } from "@/lib/content/getArtistContentReadine * Handler for GET /api/content/validate. * NOTE: Phase 1 returns structural readiness scaffolding. Deep filesystem checks * are performed in the background task before spend-heavy steps. 
+ * + * @param request */ export async function getContentValidateHandler(request: NextRequest): Promise { const validated = await validateGetContentValidateQuery(request); diff --git a/lib/content/isCompletedRun.ts b/lib/content/isCompletedRun.ts index 855ea068..951d20b2 100644 --- a/lib/content/isCompletedRun.ts +++ b/lib/content/isCompletedRun.ts @@ -5,6 +5,10 @@ export type TriggerRunLike = { output?: unknown; }; +/** + * + * @param run + */ export function isCompletedRun(run: TriggerRunLike): boolean { return run.status === "COMPLETED"; } diff --git a/lib/content/persistCreateContentRunVideo.ts b/lib/content/persistCreateContentRunVideo.ts index 25a77eed..69bac792 100644 --- a/lib/content/persistCreateContentRunVideo.ts +++ b/lib/content/persistCreateContentRunVideo.ts @@ -27,6 +27,8 @@ type CreateContentOutput = { * and returns the run with normalized output. * * This keeps Supabase writes in API only. + * + * @param run */ export async function persistCreateContentRunVideo(run: T): Promise { if (run.taskIdentifier !== CREATE_CONTENT_TASK_ID || !isCompletedRun(run)) { diff --git a/lib/content/primitives/__tests__/createAnalyzeHandler.test.ts b/lib/content/primitives/__tests__/createAnalyzeHandler.test.ts new file mode 100644 index 00000000..c416a60b --- /dev/null +++ b/lib/content/primitives/__tests__/createAnalyzeHandler.test.ts @@ -0,0 +1,144 @@ +import { beforeEach, describe, expect, it, vi } from "vitest"; +import { NextRequest, NextResponse } from "next/server"; + +vi.mock("@/lib/networking/getCorsHeaders", () => ({ + getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })), +})); + +vi.mock("@/lib/auth/validateAuthContext", () => ({ + validateAuthContext: vi.fn(), +})); + +vi.mock("@/lib/networking/safeParseJson", () => ({ + safeParseJson: vi.fn(), +})); + +const { validateAuthContext } = await import("@/lib/auth/validateAuthContext"); +const { safeParseJson } = await import("@/lib/networking/safeParseJson"); +const { createAnalyzeHandler } 
= await import("../createAnalyzeHandler"); + +const VALID_BODY = { + video_url: "https://example.com/video.mp4", + prompt: "Describe what happens in this video", +}; + +describe("createAnalyzeHandler", () => { + const originalEnv = process.env; + + beforeEach(() => { + vi.clearAllMocks(); + process.env = { ...originalEnv, TWELVELABS_API_KEY: "test-key" }; + vi.mocked(validateAuthContext).mockResolvedValue({ + accountId: "acc_123", + orgId: null, + authToken: "tok", + }); + vi.mocked(safeParseJson).mockResolvedValue(VALID_BODY); + }); + + afterEach(() => { + process.env = originalEnv; + }); + + it("returns auth error when auth fails", async () => { + const authError = NextResponse.json({ error: "Unauthorized" }, { status: 401 }); + vi.mocked(validateAuthContext).mockResolvedValue(authError); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(401); + }); + + it("returns 500 when TWELVELABS_API_KEY is missing", async () => { + delete process.env.TWELVELABS_API_KEY; + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(500); + const body = await result.json(); + expect(body.error).toContain("TWELVELABS_API_KEY"); + }); + + it("returns analysis text on success", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce( + new Response( + JSON.stringify({ + data: "This video shows a cat playing piano.", + finish_reason: "stop", + usage: { output_tokens: 42 }, + }), + { status: 200 }, + ), + ); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(200); + const body = await result.json(); + expect(body.text).toBe("This video shows a cat playing piano."); + 
expect(body.finish_reason).toBe("stop"); + expect(body.usage).toEqual({ output_tokens: 42 }); + }); + + it("returns 502 when Twelve Labs returns an error", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce(new Response("Bad Request", { status: 400 })); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(502); + const body = await result.json(); + expect(body.error).toContain("400"); + }); + + it("returns 502 when response has no data", async () => { + vi.spyOn(global, "fetch").mockResolvedValueOnce( + new Response(JSON.stringify({}), { status: 200 }), + ); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + const result = await createAnalyzeHandler(request); + + expect(result.status).toBe(502); + const body = await result.json(); + expect(body.error).toContain("no text"); + }); + + it("sends correct body to Twelve Labs API", async () => { + const fetchSpy = vi + .spyOn(global, "fetch") + .mockResolvedValueOnce( + new Response( + JSON.stringify({ data: "result", finish_reason: "stop", usage: { output_tokens: 10 } }), + { status: 200 }, + ), + ); + + const request = new NextRequest("http://localhost/api/content/analyze", { + method: "POST", + }); + await createAnalyzeHandler(request); + + expect(fetchSpy).toHaveBeenCalledOnce(); + const [url, options] = fetchSpy.mock.calls[0]; + expect(url).toBe("https://api.twelvelabs.io/v1.3/analyze"); + const sentBody = JSON.parse(options?.body as string); + expect(sentBody.video).toEqual({ type: "url", url: "https://example.com/video.mp4" }); + expect(sentBody.prompt).toBe("Describe what happens in this video"); + expect(sentBody.stream).toBe(false); + expect(sentBody.temperature).toBe(0.2); + }); +}); diff --git a/lib/content/primitives/__tests__/schemas.test.ts b/lib/content/primitives/__tests__/schemas.test.ts new file mode 
// Unit tests for the zod request schemas behind the content-primitive routes.
// Each describe block targets one schema from ../schemas and pins down:
// required vs. optional fields, defaults, and representative rejections.
import { describe, it, expect } from "vitest";
import {
  createImageBodySchema,
  createVideoBodySchema,
  createTextBodySchema,
  createAudioBodySchema,
  editBodySchema,
  createUpscaleBodySchema,
  createAnalyzeBodySchema,
} from "../schemas";

// Image generation: everything optional (a template can supply the prompt).
describe("createImageBodySchema", () => {
  it("parses valid payload with prompt only", () => {
    expect(
      createImageBodySchema.safeParse({
        prompt: "a moody portrait",
      }).success,
    ).toBe(true);
  });

  it("parses valid payload with reference image", () => {
    expect(
      createImageBodySchema.safeParse({
        prompt: "portrait photo",
        reference_image_url: "https://example.com/ref.png",
      }).success,
    ).toBe(true);
  });

  it("parses empty payload (all fields optional)", () => {
    expect(createImageBodySchema.safeParse({}).success).toBe(true);
  });

  it("accepts custom model", () => {
    const result = createImageBodySchema.safeParse({
      prompt: "test",
      model: "fal-ai/some-other-model",
    });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.model).toBe("fal-ai/some-other-model");
  });
});

// Video generation: exercises every mode (prompt/animate/extend/first-last/
// lipsync) plus the duration and generate_audio defaults.
describe("createVideoBodySchema", () => {
  it("parses prompt-only payload", () => {
    expect(
      createVideoBodySchema.safeParse({
        prompt: "a calm ocean",
      }).success,
    ).toBe(true);
  });

  it("parses animate mode with image", () => {
    expect(
      createVideoBodySchema.safeParse({
        mode: "animate",
        image_url: "https://example.com/img.png",
        prompt: "make it move",
      }).success,
    ).toBe(true);
  });

  it("parses extend mode with video", () => {
    expect(
      createVideoBodySchema.safeParse({
        mode: "extend",
        video_url: "https://example.com/clip.mp4",
        prompt: "continue the scene",
      }).success,
    ).toBe(true);
  });

  it("parses first-last mode with two images", () => {
    expect(
      createVideoBodySchema.safeParse({
        mode: "first-last",
        image_url: "https://example.com/start.png",
        end_image_url: "https://example.com/end.png",
        prompt: "transition between these",
      }).success,
    ).toBe(true);
  });

  it("parses lipsync mode", () => {
    expect(
      createVideoBodySchema.safeParse({
        mode: "lipsync",
        image_url: "https://example.com/face.png",
        audio_url: "https://example.com/audio.mp3",
      }).success,
    ).toBe(true);
  });

  it("defaults duration to 8s", () => {
    const result = createVideoBodySchema.safeParse({ prompt: "test" });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.duration).toBe("8s");
  });

  it("defaults generate_audio to false", () => {
    const result = createVideoBodySchema.safeParse({ prompt: "test" });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.generate_audio).toBe(false);
  });

  it("parses video with template", () => {
    expect(
      createVideoBodySchema.safeParse({
        template: "artist-caption-bedroom",
        prompt: "subtle motion",
      }).success,
    ).toBe(true);
  });
});

// Caption text generation: topic is the only required field.
describe("createTextBodySchema", () => {
  it("parses valid payload", () => {
    expect(
      createTextBodySchema.safeParse({
        topic: "a rainy day in the city",
      }).success,
    ).toBe(true);
  });

  it("defaults length to short", () => {
    const result = createTextBodySchema.safeParse({
      topic: "test",
    });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.length).toBe("short");
  });

  it("rejects missing topic", () => {
    expect(createTextBodySchema.safeParse({}).success).toBe(false);
  });
});

// Transcription: requires a non-empty array of valid URLs.
describe("createAudioBodySchema", () => {
  it("parses valid payload with audio URLs", () => {
    expect(
      createAudioBodySchema.safeParse({
        audio_urls: ["https://example.com/song.mp3"],
      }).success,
    ).toBe(true);
  });

  it("rejects non-URL strings", () => {
    expect(
      createAudioBodySchema.safeParse({
        audio_urls: ["not-a-url"],
      }).success,
    ).toBe(false);
  });

  it("rejects empty array", () => {
    expect(
      createAudioBodySchema.safeParse({
        audio_urls: [],
      }).success,
    ).toBe(false);
  });

  it("accepts custom model", () => {
    const result = createAudioBodySchema.safeParse({
      audio_urls: ["https://example.com/audio.mp3"],
      model: "fal-ai/custom-whisper",
    });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.model).toBe("fal-ai/custom-whisper");
  });
});

// Editing: needs at least one media input (video or audio URL) AND either a
// template or an explicit operations list.
describe("editBodySchema", () => {
  it("parses manual mode with operations", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        operations: [{ type: "trim", start: 10, duration: 15 }],
      }).success,
    ).toBe(true);
  });

  it("parses template mode", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        template: "artist-caption-bedroom",
      }).success,
    ).toBe(true);
  });

  it("rejects missing both template and operations", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
      }).success,
    ).toBe(false);
  });

  it("rejects missing all inputs", () => {
    expect(
      editBodySchema.safeParse({
        operations: [{ type: "trim", start: 0, duration: 5 }],
      }).success,
    ).toBe(false);
  });

  it("accepts audio_url as input", () => {
    expect(
      editBodySchema.safeParse({
        audio_url: "https://example.com/a.mp3",
        operations: [{ type: "trim", start: 0, duration: 15 }],
      }).success,
    ).toBe(true);
  });

  it("parses overlay_text operation", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        operations: [{ type: "overlay_text", content: "hello world" }],
      }).success,
    ).toBe(true);
  });

  it("parses mux_audio operation", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        operations: [{ type: "mux_audio", audio_url: "https://example.com/a.mp3" }],
      }).success,
    ).toBe(true);
  });

  it("parses crop operation", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        operations: [{ type: "crop", aspect: "9:16" }],
      }).success,
    ).toBe(true);
  });

  it("parses multiple operations", () => {
    expect(
      editBodySchema.safeParse({
        video_url: "https://example.com/v.mp4",
        operations: [
          { type: "trim", start: 30, duration: 15 },
          { type: "crop", aspect: "9:16" },
          { type: "overlay_text", content: "caption" },
          { type: "mux_audio", audio_url: "https://example.com/a.mp3" },
        ],
      }).success,
    ).toBe(true);
  });

  it("defaults output_format to mp4", () => {
    const result = editBodySchema.safeParse({
      video_url: "https://example.com/v.mp4",
      operations: [{ type: "trim", start: 0, duration: 5 }],
    });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.output_format).toBe("mp4");
  });
});

// Upscaling: type is a strict "image" | "video" enum.
describe("createUpscaleBodySchema", () => {
  it("parses image upscale", () => {
    expect(
      createUpscaleBodySchema.safeParse({
        url: "https://example.com/img.png",
        type: "image",
      }).success,
    ).toBe(true);
  });

  it("rejects invalid type", () => {
    expect(
      createUpscaleBodySchema.safeParse({
        url: "https://example.com/f",
        type: "audio",
      }).success,
    ).toBe(false);
  });
});

// Video analysis: url + prompt required; prompt capped at 2000 characters.
describe("createAnalyzeBodySchema", () => {
  it("parses valid payload", () => {
    expect(
      createAnalyzeBodySchema.safeParse({
        video_url: "https://example.com/video.mp4",
        prompt: "Describe what happens in this video",
      }).success,
    ).toBe(true);
  });

  it("defaults temperature to 0.2", () => {
    const result = createAnalyzeBodySchema.safeParse({
      video_url: "https://example.com/video.mp4",
      prompt: "Describe this video",
    });
    expect(result.success).toBe(true);
    if (result.success) expect(result.data.temperature).toBe(0.2);
  });

  it("rejects prompt exceeding 2000 chars", () => {
    expect(
      createAnalyzeBodySchema.safeParse({
        video_url: "https://example.com/video.mp4",
        prompt: "x".repeat(2001),
      }).success,
    ).toBe(false);
  });

  it("rejects invalid video_url", () => {
    expect(
      createAnalyzeBodySchema.safeParse({
        video_url: "not-a-url",
        prompt: "Describe this video",
      }).success,
    ).toBe(false);
  });
});

// Unit tests for validatePrimitiveBody: parses the request JSON via the
// (mocked) safeParseJson helper, validates it against a caller-supplied zod
// schema, and returns either the typed data or a 400 NextResponse.
import { describe, it, expect, vi, beforeEach } from "vitest";
import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";
import { validatePrimitiveBody } from "../validatePrimitiveBody";

// vi.mock calls are hoisted above the imports by vitest's transform.
vi.mock("@/lib/networking/getCorsHeaders", () => ({
  getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })),
}));

vi.mock("@/lib/networking/safeParseJson", () => ({
  safeParseJson: vi.fn(),
}));

// Dynamic import AFTER vi.mock so we receive the mocked module.
const { safeParseJson } = await import("@/lib/networking/safeParseJson");

// Minimal schema: one required string, one optional number.
const testSchema = z.object({
  name: z.string().min(1),
  value: z.number().optional(),
});

describe("validatePrimitiveBody", () => {
  beforeEach(() => {
    vi.clearAllMocks();
  });

  it("returns validated data on success", async () => {
    vi.mocked(safeParseJson).mockResolvedValue({ name: "test" });

    const request = new NextRequest("http://localhost/api/test", {
      method: "POST",
    });
    const result = await validatePrimitiveBody(request, testSchema);

    expect(result).not.toBeInstanceOf(NextResponse);
    expect(result).toEqual({ name: "test" });
  });

  it("returns 400 when schema validation fails", async () => {
    // name fails the min(1) constraint.
    vi.mocked(safeParseJson).mockResolvedValue({ name: "" });

    const request = new NextRequest("http://localhost/api/test", {
      method: "POST",
    });
    const result = await validatePrimitiveBody(request, testSchema);

    expect(result).toBeInstanceOf(NextResponse);
    expect((result as NextResponse).status).toBe(400);
  });
});
a/lib/content/primitives/configureFal.ts b/lib/content/primitives/configureFal.ts new file mode 100644 index 00000000..a18872d5 --- /dev/null +++ b/lib/content/primitives/configureFal.ts @@ -0,0 +1,21 @@ +import { NextResponse } from "next/server"; +import { fal } from "@fal-ai/client"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; + +/** + * Checks for FAL_KEY and configures the fal client. + * Returns null on success, or a 500 NextResponse if the key is missing. + * + * @returns Null if configured, or an error NextResponse. + */ +export function configureFal(): NextResponse | null { + const falKey = process.env.FAL_KEY; + if (!falKey) { + return NextResponse.json( + { status: "error", error: "FAL_KEY is not configured" }, + { status: 500, headers: getCorsHeaders() }, + ); + } + fal.config({ credentials: falKey }); + return null; +} diff --git a/lib/content/primitives/createAnalyzeHandler.ts b/lib/content/primitives/createAnalyzeHandler.ts new file mode 100644 index 00000000..011c504f --- /dev/null +++ b/lib/content/primitives/createAnalyzeHandler.ts @@ -0,0 +1,84 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { createAnalyzeBodySchema } from "./schemas"; + +const TWELVELABS_ANALYZE_URL = "https://api.twelvelabs.io/v1.3/analyze"; + +/** + * POST /api/content/analyze + * + * @param request - Incoming request with video URL and analysis prompt. + * @returns JSON with the generated analysis text. 
+ */ +export async function createAnalyzeHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createAnalyzeBodySchema); + if (validated instanceof NextResponse) return validated; + + const apiKey = process.env.TWELVELABS_API_KEY; + if (!apiKey) { + return NextResponse.json( + { status: "error", error: "TWELVELABS_API_KEY is not configured" }, + { status: 500, headers: getCorsHeaders() }, + ); + } + + try { + const response = await fetch(TWELVELABS_ANALYZE_URL, { + method: "POST", + headers: { + "x-api-key": apiKey, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + video: { type: "url", url: validated.video_url }, + prompt: validated.prompt, + temperature: validated.temperature, + stream: false, + ...(validated.max_tokens && { max_tokens: validated.max_tokens }), + }), + }); + + if (!response.ok) { + const errorBody = await response.text(); + console.error("Twelve Labs analyze error:", response.status, errorBody); + return NextResponse.json( + { status: "error", error: `Video analysis failed: ${response.status}` }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + const json = (await response.json()) as { + data?: string; + finish_reason?: string; + usage?: { output_tokens?: number }; + }; + + if (!json.data) { + return NextResponse.json( + { status: "error", error: "Video analysis returned no text" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json( + { + text: json.data, + finish_reason: json.finish_reason ?? null, + usage: json.usage ?? 
null, + }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Video analysis error:", error); + return NextResponse.json( + { status: "error", error: "Video analysis failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/createAudioHandler.ts b/lib/content/primitives/createAudioHandler.ts new file mode 100644 index 00000000..b88d2d62 --- /dev/null +++ b/lib/content/primitives/createAudioHandler.ts @@ -0,0 +1,69 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { fal } from "@fal-ai/client"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { configureFal } from "./configureFal"; +import { createAudioBodySchema } from "./schemas"; + +const DEFAULT_MODEL = "fal-ai/whisper"; + +/** + * POST /api/content/transcribe + * + * @param request - Incoming request with audio URLs to transcribe. + * @returns JSON with transcription and timestamped segments. + */ +export async function createAudioHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createAudioBodySchema); + if (validated instanceof NextResponse) return validated; + + const falError = configureFal(); + if (falError) return falError; + + try { + const audioUrl = validated.audio_urls[0]; + + const result = await fal.subscribe(validated.model ?? 
DEFAULT_MODEL, { + input: { + audio_url: audioUrl, + task: "transcribe", + chunk_level: validated.chunk_level, + language: validated.language, + diarize: validated.diarize, + }, + }); + + const whisperData = result.data as unknown as { + text?: string; + chunks?: Array<{ timestamp: number[]; text: string }>; + }; + + const fullLyrics = whisperData.text ?? ""; + const segments = (whisperData.chunks ?? []).map(chunk => ({ + start: chunk.timestamp[0] ?? 0, + end: chunk.timestamp[1] ?? 0, + text: chunk.text?.trim() ?? "", + })); + + return NextResponse.json( + { + audioUrl, + fullLyrics, + segments, + segmentCount: segments.length, + }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Audio processing error:", error); + return NextResponse.json( + { status: "error", error: "Audio processing failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/createImageHandler.ts b/lib/content/primitives/createImageHandler.ts new file mode 100644 index 00000000..29ac1f13 --- /dev/null +++ b/lib/content/primitives/createImageHandler.ts @@ -0,0 +1,95 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { fal } from "@fal-ai/client"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { configureFal } from "./configureFal"; +import { createImageBodySchema } from "./schemas"; +import { loadTemplate } from "@/lib/content/templates"; + +const DEFAULT_T2I_MODEL = "fal-ai/nano-banana-2"; +const DEFAULT_EDIT_MODEL = "fal-ai/nano-banana-2/edit"; + +/** + * POST /api/content/image + * + * @param request - Incoming request with image generation parameters. + * @returns JSON with the generated image URL. 
+ */ +export async function createImageHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createImageBodySchema); + if (validated instanceof NextResponse) return validated; + + const falError = configureFal(); + if (falError) return falError; + + try { + const tpl = validated.template ? loadTemplate(validated.template) : null; + + const prompt = validated.prompt ?? tpl?.image.prompt ?? "portrait photo, natural lighting"; + + const refImageUrl = + validated.reference_image_url ?? + (tpl?.image.reference_images.length + ? tpl.image.reference_images[Math.floor(Math.random() * tpl.image.reference_images.length)] + : undefined); + + const hasReferenceImages = refImageUrl || (validated.images && validated.images.length > 0); + + let model: string; + const input: Record = { + prompt: tpl?.image.style_rules + ? `${prompt}\n\nStyle rules: ${Object.entries(tpl.image.style_rules) + .map(([k, v]) => `${k}: ${Object.values(v).join(", ")}`) + .join(". ")}` + : prompt, + num_images: validated.num_images, + aspect_ratio: validated.aspect_ratio, + resolution: validated.resolution, + output_format: "png", + safety_tolerance: "6", + enable_web_search: true, + thinking_level: "high", + limit_generations: true, + }; + + if (hasReferenceImages) { + model = validated.model ?? DEFAULT_EDIT_MODEL; + const imageUrls: string[] = []; + if (refImageUrl) imageUrls.push(refImageUrl); + if (validated.images) imageUrls.push(...validated.images); + input.image_urls = imageUrls; + } else { + model = validated.model ?? 
DEFAULT_T2I_MODEL; + } + + const result = await fal.subscribe(model, { input }); + + const resultData = result.data as Record; + const imageList = resultData?.images as Array> | undefined; + + if (!imageList || imageList.length === 0) { + return NextResponse.json( + { status: "error", error: "Image generation returned no image" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + const urls = imageList.map(img => img.url as string).filter(Boolean); + + return NextResponse.json( + { imageUrl: urls[0], images: urls }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Image generation error:", error); + return NextResponse.json( + { status: "error", error: "Image generation failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/createPrimitiveRoute.ts b/lib/content/primitives/createPrimitiveRoute.ts new file mode 100644 index 00000000..dee32472 --- /dev/null +++ b/lib/content/primitives/createPrimitiveRoute.ts @@ -0,0 +1,29 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; + +type Handler = (request: NextRequest) => Promise; + +/** + * Creates the standard OPTIONS + POST exports for a content primitive route. + * Route segment config (dynamic, fetchCache, revalidate) must still be + * exported directly from the route file — Next.js requires static analysis. + * + * @param handler - The POST handler function for the route. + * @returns Object with OPTIONS and POST exports. + */ +/** + * Standard CORS preflight handler for content primitive routes. + * + * @returns 204 response with CORS headers. 
+ */ +export async function primitiveOptionsHandler() { + return new NextResponse(null, { status: 204, headers: getCorsHeaders() }); +} + +export function createPrimitiveRoute(handler: Handler) { + return { + OPTIONS: primitiveOptionsHandler, + POST: handler, + }; +} diff --git a/lib/content/primitives/createTextHandler.ts b/lib/content/primitives/createTextHandler.ts new file mode 100644 index 00000000..9c69d8b8 --- /dev/null +++ b/lib/content/primitives/createTextHandler.ts @@ -0,0 +1,79 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { createTextBodySchema } from "./schemas"; +import generateText from "@/lib/ai/generateText"; +import { LIGHTWEIGHT_MODEL } from "@/lib/const"; +import { loadTemplate } from "@/lib/content/templates"; +import type { Template } from "@/lib/content/templates"; + +/** + * Builds the LLM prompt for caption generation, optionally with template guide. + * + * @param topic - Subject or theme for the caption. + * @param length - Desired caption length tier. + * @param tpl - Optional template with caption guide and examples. + * @returns Formatted prompt string. + */ +function composeCaptionPrompt(topic: string, length: string, tpl: Template | null): string { + let prompt = `Generate ONE short on-screen text for a social media video. +Topic: "${topic}" +Length: ${length} +Return ONLY the text, nothing else. 
No quotes.`; + + if (tpl?.caption.guide) { + const g = tpl.caption.guide; + prompt += `\n\nStyle: ${g.tone}`; + if (g.rules.length) prompt += `\nRules:\n${g.rules.map(r => `- ${r}`).join("\n")}`; + if (g.formats.length) prompt += `\nFormats to try:\n${g.formats.map(f => `- ${f}`).join("\n")}`; + } + + if (tpl?.caption.examples.length) { + prompt += `\n\nExamples of good captions:\n${tpl.caption.examples.map(e => `- "${e}"`).join("\n")}`; + } + + return prompt; +} + +/** + * POST /api/content/caption + * + * @param request - Incoming Next.js request with JSON body validated by the text primitive schema. + * @returns JSON with generated text styling fields, or an error NextResponse. + */ +export async function createTextHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createTextBodySchema); + if (validated instanceof NextResponse) return validated; + + try { + const tpl = validated.template ? 
loadTemplate(validated.template) : null; + const prompt = composeCaptionPrompt(validated.topic, validated.length, tpl); + const result = await generateText({ prompt, model: LIGHTWEIGHT_MODEL }); + + let content = result.text.trim(); + content = content.replace(/^["']|["']$/g, "").trim(); + + if (!content) { + return NextResponse.json( + { status: "error", error: "Text generation returned empty" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json( + { content, font: null, color: "white", borderColor: "black", maxFontSize: 42 }, + { status: 200, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Text generation error:", error); + return NextResponse.json( + { status: "error", error: "Text generation failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/createUpscaleHandler.ts b/lib/content/primitives/createUpscaleHandler.ts new file mode 100644 index 00000000..3c645183 --- /dev/null +++ b/lib/content/primitives/createUpscaleHandler.ts @@ -0,0 +1,64 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { fal } from "@fal-ai/client"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { configureFal } from "./configureFal"; +import { createUpscaleBodySchema } from "./schemas"; + +/** + * POST /api/content/upscale + * + * @param request - Incoming request with the URL and type to upscale. + * @returns JSON with the upscaled URL. 
+ */ +export async function createUpscaleHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createUpscaleBodySchema); + if (validated instanceof NextResponse) return validated; + + const falError = configureFal(); + if (falError) return falError; + + try { + const model = + validated.type === "video" ? "fal-ai/seedvr/upscale/video" : "fal-ai/seedvr/upscale/image"; + + const inputKey = validated.type === "video" ? "video_url" : "image_url"; + + const input: Record = { + [inputKey]: validated.url, + upscale_factor: validated.upscale_factor, + }; + if (validated.target_resolution) { + input.upscale_mode = "target"; + input.target_resolution = validated.target_resolution; + } + + const result = await fal.subscribe(model as string, { input }); + + const resultData = result.data as Record; + const url = + validated.type === "video" + ? 
((resultData?.video as Record)?.url as string | undefined) + : ((resultData?.image as Record)?.url as string | undefined); + + if (!url) { + return NextResponse.json( + { status: "error", error: "Upscale returned no result" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json({ url }, { status: 200, headers: getCorsHeaders() }); + } catch (error) { + console.error("Upscale error:", error); + return NextResponse.json( + { status: "error", error: "Upscale failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/createVideoHandler.ts b/lib/content/primitives/createVideoHandler.ts new file mode 100644 index 00000000..ef1ac039 --- /dev/null +++ b/lib/content/primitives/createVideoHandler.ts @@ -0,0 +1,143 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { fal } from "@fal-ai/client"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { configureFal } from "./configureFal"; +import { createVideoBodySchema } from "./schemas"; +import { loadTemplate } from "@/lib/content/templates"; + +const MODELS: Record = { + prompt: "fal-ai/veo3.1", + animate: "fal-ai/veo3.1/image-to-video", + reference: "fal-ai/veo3.1/reference-to-video", + extend: "fal-ai/veo3.1/extend-video", + "first-last": "fal-ai/veo3.1/first-last-frame-to-video", + lipsync: "fal-ai/ltx-2-19b/audio-to-video", +}; + +/** + * Infers the mode from the inputs when the caller doesn't specify one. + * + * @param v - Validated request body. + * @returns The inferred mode string. 
+ */ +function inferMode(v: { + audio_url?: string; + video_url?: string; + image_url?: string; + end_image_url?: string; +}): string { + if (v.audio_url && v.image_url) return "lipsync"; + if (v.video_url) return "extend"; + if (v.image_url && v.end_image_url) return "first-last"; + if (v.image_url) return "animate"; + return "prompt"; +} + +/** + * Maps user-facing fields to the fal input format for each mode. + * Different fal models expect different field names for the same concept. + * + * @param mode - The resolved video generation mode. + * @param v - Validated request body. + * @returns The fal input object with mode-specific field mappings. + */ +function buildFalInput( + mode: string, + v: { + prompt?: string; + negative_prompt?: string; + image_url?: string; + end_image_url?: string; + video_url?: string; + audio_url?: string; + aspect_ratio: string; + duration: string; + resolution: string; + generate_audio: boolean; + }, +): Record { + const input: Record = { + prompt: v.prompt ?? "", + aspect_ratio: v.aspect_ratio, + duration: v.duration, + resolution: v.resolution, + generate_audio: v.generate_audio, + safety_tolerance: "6", + auto_fix: true, + }; + + if (v.negative_prompt) input.negative_prompt = v.negative_prompt; + + if (mode === "reference" && v.image_url) { + input.image_urls = [v.image_url]; + } else if (mode === "first-last" && v.image_url) { + input.first_frame_url = v.image_url; + if (v.end_image_url) input.last_frame_url = v.end_image_url; + } else if (v.image_url) { + input.image_url = v.image_url; + } + + if (v.video_url) input.video_url = v.video_url; + if (v.audio_url) input.audio_url = v.audio_url; + + return input; +} + +/** + * POST /api/content/video + * + * @param request - Incoming request with video generation parameters. + * @returns JSON with the generated video URL. 
+ */ +export async function createVideoHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, createVideoBodySchema); + if (validated instanceof NextResponse) return validated; + + const falError = configureFal(); + if (falError) return falError; + + try { + const tpl = validated.template ? loadTemplate(validated.template) : null; + + let promptOverride = validated.prompt; + if (!promptOverride && tpl?.video) { + const parts: string[] = []; + if (tpl.video.movements.length) { + parts.push(tpl.video.movements[Math.floor(Math.random() * tpl.video.movements.length)]); + } + if (tpl.video.moods.length) { + parts.push(tpl.video.moods[Math.floor(Math.random() * tpl.video.moods.length)]); + } + if (parts.length) promptOverride = parts.join(". "); + } + + const mode = validated.mode ?? inferMode(validated); + const model = validated.model ?? MODELS[mode] ?? MODELS.prompt; + const input = buildFalInput(mode, { ...validated, prompt: promptOverride ?? 
validated.prompt }); + + const result = await fal.subscribe(model, { input }); + const resultData = result.data as Record; + const videoUrl = (resultData?.video as Record)?.url as string | undefined; + + if (!videoUrl) { + return NextResponse.json( + { status: "error", error: "Video generation returned no video" }, + { status: 502, headers: getCorsHeaders() }, + ); + } + + return NextResponse.json({ videoUrl, mode }, { status: 200, headers: getCorsHeaders() }); + } catch (error) { + console.error("Video generation error:", error); + return NextResponse.json( + { status: "error", error: "Video generation failed" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/editHandler.ts b/lib/content/primitives/editHandler.ts new file mode 100644 index 00000000..f6e4effc --- /dev/null +++ b/lib/content/primitives/editHandler.ts @@ -0,0 +1,52 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { validateAuthContext } from "@/lib/auth/validateAuthContext"; +import { triggerPrimitive } from "@/lib/trigger/triggerPrimitive"; +import { validatePrimitiveBody } from "./validatePrimitiveBody"; +import { editBodySchema } from "./schemas"; +import { loadTemplate } from "@/lib/content/templates"; + +/** + * PATCH /api/content + * + * @param request - Incoming request with media inputs and edit operations. + * @returns JSON with the triggered run ID. 
+ */ +export async function editHandler(request: NextRequest): Promise { + const authResult = await validateAuthContext(request); + if (authResult instanceof NextResponse) return authResult; + + const validated = await validatePrimitiveBody(request, editBodySchema); + if (validated instanceof NextResponse) return validated; + + try { + let operations = validated.operations; + + if (!operations && validated.template) { + const tpl = loadTemplate(validated.template); + if (tpl?.edit.operations) { + operations = tpl.edit.operations; + } + } + + const handle = await triggerPrimitive("create-render", { + videoUrl: validated.video_url, + audioUrl: validated.audio_url, + operations, + outputFormat: validated.output_format, + accountId: authResult.accountId, + }); + + return NextResponse.json( + { runId: handle.id, status: "triggered" }, + { status: 202, headers: getCorsHeaders() }, + ); + } catch (error) { + console.error("Failed to trigger edit:", error); + return NextResponse.json( + { status: "error", error: "Failed to trigger edit task" }, + { status: 500, headers: getCorsHeaders() }, + ); + } +} diff --git a/lib/content/primitives/schemas.ts b/lib/content/primitives/schemas.ts new file mode 100644 index 00000000..f4a5f304 --- /dev/null +++ b/lib/content/primitives/schemas.ts @@ -0,0 +1,124 @@ +import { z } from "zod"; +import { CAPTION_LENGTHS } from "@/lib/content/captionLengths"; + +export const createImageBodySchema = z.object({ + template: z.string().optional(), + prompt: z.string().optional(), + reference_image_url: z.string().url().optional(), + images: z.array(z.string().url()).optional(), + num_images: z.number().int().min(1).max(4).optional().default(1), + aspect_ratio: z + .enum([ + "auto", + "21:9", + "16:9", + "3:2", + "4:3", + "5:4", + "1:1", + "4:5", + "3:4", + "2:3", + "9:16", + "4:1", + "1:4", + "8:1", + "1:8", + ]) + .optional() + .default("auto"), + resolution: z.enum(["0.5K", "1K", "2K", "4K"]).optional().default("1K"), + model: 
z.string().optional(), +}); + +export const createVideoBodySchema = z.object({ + template: z.string().optional(), + mode: z.enum(["prompt", "animate", "reference", "extend", "first-last", "lipsync"]).optional(), + prompt: z.string().optional(), + image_url: z.string().url().optional(), + end_image_url: z.string().url().optional(), + video_url: z.string().url().optional(), + audio_url: z.string().url().optional(), + aspect_ratio: z.enum(["auto", "16:9", "9:16"]).optional().default("auto"), + duration: z.enum(["4s", "6s", "7s", "8s"]).optional().default("8s"), + resolution: z.enum(["720p", "1080p", "4k"]).optional().default("720p"), + negative_prompt: z.string().optional(), + generate_audio: z.boolean().optional().default(false), + model: z.string().optional(), +}); + +export const createTextBodySchema = z.object({ + template: z.string().optional(), + topic: z.string().min(1), + length: z.enum(CAPTION_LENGTHS).optional().default("short"), +}); + +export const createAudioBodySchema = z.object({ + audio_urls: z.array(z.string().url()).min(1), + language: z.string().optional().default("en"), + chunk_level: z.enum(["none", "segment", "word"]).optional().default("word"), + diarize: z.boolean().optional().default(false), + model: z.string().optional(), +}); + +export const editOperationSchema = z.discriminatedUnion("type", [ + z.object({ + type: z.literal("trim"), + start: z.number().nonnegative(), + duration: z.number().positive(), + }), + z.object({ + type: z.literal("crop"), + aspect: z.string().optional(), + width: z.number().int().positive().optional(), + height: z.number().int().positive().optional(), + }), + z.object({ + type: z.literal("resize"), + width: z.number().int().positive().optional(), + height: z.number().int().positive().optional(), + }), + z.object({ + type: z.literal("overlay_text"), + content: z.string().min(1), + font: z.string().optional(), + color: z.string().optional().default("white"), + stroke_color: z.string().optional().default("black"), + 
max_font_size: z.number().positive().optional().default(42), + position: z.enum(["top", "center", "bottom"]).optional().default("bottom"), + }), + z.object({ + type: z.literal("mux_audio"), + audio_url: z.string().url(), + replace: z.boolean().optional().default(true), + }), +]); + +export const editBodySchema = z + .object({ + video_url: z.string().url().optional(), + audio_url: z.string().url().optional(), + template: z.string().optional(), + operations: z.array(editOperationSchema).optional(), + output_format: z.enum(["mp4", "webm", "mov"]).optional().default("mp4"), + }) + .refine(data => data.video_url || data.audio_url, { + message: "Must provide at least one input (video_url or audio_url)", + }) + .refine(data => data.template || (data.operations && data.operations.length > 0), { + message: "Must provide either template or operations", + }); + +export const createUpscaleBodySchema = z.object({ + url: z.string().url(), + type: z.enum(["image", "video"]), + upscale_factor: z.number().min(1).max(4).optional().default(2), + target_resolution: z.enum(["720p", "1080p", "1440p", "2160p"]).optional(), +}); + +export const createAnalyzeBodySchema = z.object({ + video_url: z.string().url(), + prompt: z.string().min(1).max(2000), + temperature: z.number().min(0).max(1).optional().default(0.2), + max_tokens: z.number().int().min(1).max(4096).optional(), +}); diff --git a/lib/content/primitives/validatePrimitiveBody.ts b/lib/content/primitives/validatePrimitiveBody.ts new file mode 100644 index 00000000..a9d94c68 --- /dev/null +++ b/lib/content/primitives/validatePrimitiveBody.ts @@ -0,0 +1,32 @@ +import type { NextRequest } from "next/server"; +import { NextResponse } from "next/server"; +import type { z } from "zod"; +import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; +import { safeParseJson } from "@/lib/networking/safeParseJson"; + +/** + * Parses and validates the request body against a Zod schema. + * Shared by all content primitive endpoints. 
+ * Auth is handled separately by each handler via validateAuthContext. + * + * @param request - Incoming Next.js request (body read as JSON). + * @param schema - Zod schema for the expected JSON body shape. + * @returns Validated parsed data, or a NextResponse error. + */ +export async function validatePrimitiveBody( + request: NextRequest, + schema: z.ZodSchema, +): Promise { + const body = await safeParseJson(request); + const result = schema.safeParse(body); + + if (!result.success) { + const firstError = result.error.issues[0]; + return NextResponse.json( + { status: "error", field: firstError.path, error: firstError.message }, + { status: 400, headers: getCorsHeaders() }, + ); + } + + return result.data; +} diff --git a/lib/content/templates/album-record-store.json b/lib/content/templates/album-record-store.json new file mode 100644 index 00000000..d2db7e2b --- /dev/null +++ b/lib/content/templates/album-record-store.json @@ -0,0 +1,108 @@ +{ + "id": "album-record-store", + "description": "Vinyl record on display in a NYC record store. No artist on camera — product shot of the album. Promotional captions. Vertical 9:16 video, 8 seconds. Best for: release day, album promotion, single drops. Requires: audio. No face image needed.", + "image": { + "prompt": "A vinyl record spinning on a turntable inside a cramped, rundown New York City record store. The album cover art is displayed next to the turntable, propped against a stack of records. Wooden crate bins full of vinyl records fill the background. Warm tungsten overhead light, dust particles visible in the air. The store feels lived-in — peeling stickers on the counter, handwritten price tags, faded band posters on the walls. 
Phone camera, slightly warm color cast.", + "reference_images": [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAxLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.4_aouIYxW9jSZb6U9S_XOgygyVS4Nqg4uPJ0l5qNEz8", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAyLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.FcKfpm79HH-cx4NIW_-EJJ7qaxM-LY-Ea72EF3U5zIU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTAzLnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.Dos9-VI40yCviZNSYRPcc0Owz9QJs1vHvmQ2ptFOCXs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA0LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.Dvk_unwcGS63a-VreepJf3Pm4nm4kYCL0-lThxUkL34", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA1LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.KCvBqIkjVmAKj4xoU3y5txw2mNwWl88cbj7Ln0u8v68", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA2LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.BIGZ2WG15ecaodHkQ5aSprIGbFnXBjqBH62r_vdZ7Eg", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA3LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.88e5hWeqa7d1vLhN4KnsGNKV1JXiU9a0zWHZtELJ9DE", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA4LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.9MldLiE0pSW9smN402wQ-xewLBkNUNImn6hzoHY5zwU", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/album-record-store/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hbGJ1bS1yZWNvcmQtc3RvcmUvcmVmLTA5LnBuZyIsImlhdCI6MTc3NTE4NTA1NywiZXhwIjoxODA2NzIxMDU3fQ.p7iStudC3RxtBA_hZUP3sz5dOOtVAkVa9iDFB7ItwDU" + ], + "style_rules": { + "camera": { + "type": "iPhone resting on the counter, recording a quick story", + "angle": "slightly above the turntable, looking down at an angle — like someone held their phone over the record to film it spinning", + "quality": "iPhone video quality — warm color cast from the overhead light, slight lens flare, not perfectly sharp, natural vignetting at corners", + "focus": "turntable and album art in focus, background bins and shelves slightly soft" + }, + "environment": { + "feel": "a real independent record store in lower Manhattan or Brooklyn — cramped, cluttered, full of character", + "lighting": "warm tungsten bulbs overhead, maybe a small desk lamp near the register. Pools of warm light, deep shadows between the bins. 
Dust particles catching the light.", + "backgrounds": "wooden crate bins overflowing with vinyl, hand-lettered genre dividers, faded concert posters and stickers on every surface, a boombox or old speakers on a high shelf, maybe a cat sleeping on a stack of records", + "avoid": "clean modern stores, bright fluorescent lighting, empty shelves, corporate branding, pristine surfaces, anything that looks new or staged" + }, + "subject": { + "expression": "N/A — no person in the shot, the subject is the album and turntable", + "pose": "N/A", + "clothing": "N/A", + "framing": "turntable takes up the lower half of frame, album art visible in the upper portion or to the side, surrounded by the store environment" + }, + "realism": { + "priority": "this MUST look like a real phone video taken inside an actual NYC record store, not a render or AI image", + "texture": "warm grain from the phone camera, slight dust and scratches visible on the vinyl, wood grain on the crate bins, worn edges on the record sleeves", + "imperfections": "fingerprints on the vinyl, slightly crooked album display, a price sticker on the sleeve, dust on the turntable platter, uneven stacks of records in the background", + "avoid": "clean renders, perfect symmetry, bright even lighting, glossy surfaces, anything that looks digital or AI-generated, stock-photo record stores" + } + } + }, + "video": { + "moods": [ + "warm nostalgia, like walking into a place that reminds you of being a kid", + "quiet pride, the feeling of seeing something you made exist in the real world", + "intimate, like youre showing a close friend something that matters to you", + "reverent, the way people handle vinyl carefully because it feels sacred", + "bittersweet, like the album captured a version of you that doesnt exist anymore", + "hypnotic, the kind of calm that comes from watching something spin in circles", + "peaceful solitude, alone in the store after hours", + "wistful, like remembering the sessions that made this 
album" + ], + "movements": [ + "the vinyl spins steadily, tonearm tracking the groove, dust particles drift through the warm light", + "camera slowly drifts closer to the album art, the vinyl keeps spinning in the background", + "a hand reaches into frame and gently places the needle on the record", + "the turntable spins, the overhead light flickers once, dust motes float lazily", + "someone flips through records in a crate in the background, out of focus, while the vinyl spins", + "the camera barely moves, just the vinyl spinning and the warm light shifting slightly", + "a slight camera drift to reveal more of the store — bins, posters, clutter — then settles back on the turntable", + "the tonearm rides the groove, a tiny reflection of light glints off the spinning vinyl surface" + ] + }, + "caption": { + "guide": { + "templateStyle": "album art on vinyl in a record store — the kind of post an artist makes when their music hits wax for the first time", + "captionRole": "the caption should feel like the artist posted this themselves. proud but not corny. announcing the vinyl, reflecting on the music, or saying something raw about what the album means.", + "tone": "understated pride, like posting a photo of your album in a store and letting the moment speak for itself. 
not hype-man energy — quiet flex.", + "rules": [ + "lowercase only", + "keep it under 80 characters for short, can go longer for medium/long", + "no punctuation at the end unless its a question mark", + "never sound like a press release or marketing copy", + "never say 'out now' or 'stream now' or 'link in bio'", + "dont describe whats in the image", + "can reference the album, the songs, or what they mean to you", + "can reference the physical vinyl / record store experience", + "if it sounds like a label wrote it, rewrite it until it sounds like the artist texted it to a friend" + ], + "formats": [ + "a one-line reflection on the album ('i left everything in this one')", + "a quiet flex about being on vinyl ('never thought id see this in a store')", + "a nostalgic moment ('used to dig through bins like this looking for something that felt like home')", + "something the listener would screenshot ('this album is the version of me i was scared to show you')", + "a short dedication or thank you that feels real, not performative" + ] + }, + "examples": [ + "i left everything in this one", + "found myself in the crates today", + "never thought id see my name on a spine in a record store", + "wrote this in my bedroom now its on wax", + "this album is the version of me i was scared to show you", + "every scratch on this vinyl is a memory", + "the songs sound different on wax. 
heavier somehow", + "somebody in new york is gonna find this in a bin one day and feel something" + ] + }, + "edit": { + "operations": [ + { "type": "crop", "aspect": "9:16" }, + { "type": "overlay_text", "color": "white", "stroke_color": "black", "position": "bottom", "max_font_size": 42 }, + { "type": "mux_audio", "replace": true } + ] + } +} diff --git a/lib/content/templates/artist-caption-bedroom.json b/lib/content/templates/artist-caption-bedroom.json new file mode 100644 index 00000000..02cbbf51 --- /dev/null +++ b/lib/content/templates/artist-caption-bedroom.json @@ -0,0 +1,115 @@ +{ + "id": "artist-caption-bedroom", + "description": "Moody bedroom selfie. Artist on camera with deadpan expression, purple LED lighting, dark room. Short blunt captions in lowercase. Vertical 9:16 video, 8 seconds. Best for: introspective songs, vulnerable moments, daily content. Requires: face image, audio.", + "image": { + "prompt": "A candid front-facing selfie INSIDE A BEDROOM. The person is sitting on an unmade bed or at a desk in their bedroom. Purple LED strip lights glow on the wall behind them. The room is dark with only the purple glow illuminating their face. Phone camera, low light, grainy. Wearing a hoodie, deadpan expression. 
The setting MUST be indoors in a real bedroom, not outside.", + "reference_images": [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMS5wbmciLCJpYXQiOjE3NzUxODUwNTIsImV4cCI6MTgwNjcyMTA1Mn0.LNONuOqaksZeatR8sFGLLlj3d3QWQ1bhETrANiv5VFo", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMi5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.fmcN6QprMwpHMuVEM72XQ9DZwWC49zfwwB5Hk1DT2_c", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wMy5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.7kRSqn7nnhYmymnOeSf2d8fGTWNWpu87EUL56MTXkkc", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNC5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.0xTWb46WAqPSWheoRnyeSKccMiIVLglio3NZPnh3Cb0", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNS5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.lYNYQ-NPuvt2jYxei33DRrblLRvd_ksaswH9rBgEccI", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNi5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.q3xfJzyINgd68YJyYaII55y3gFUKDb0vSr4uueNSys0", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wNy5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.2sIZZARH7N5cm4PG_4Y7KOepbrNZXqTt5rdghN-7oIA", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wOC5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.Rnjr7owp6zoz-RSuBsdgLVvs2xo3uzASAoCvXyn-CKc", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0wOS5wbmciLCJpYXQiOjE3NzUxODUwNTMsImV4cCI6MTgwNjcyMTA1M30.6jasZ_PBNu7p-rLM7jgzEXe2GwuTsdpNNG9_FOupgXY", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMC5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.hjlEdopp4MstfHLpTl84T2ev54ecedUVsiYXSaV3AP4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-11.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.E8Sp_BSQqzVMGxx5t4SVYKiT3_CnTxPcvqRcEnRB6rU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-12.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.ePlhmDPm2LuK2TD7mDgnO7ta0k_cdV8mWF8kwBR3y9k", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-bedroom/ref-13.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1iZWRyb29tL3JlZi0xMy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.fe2N42_2A7jj8m-SD3TUel0-wvaOrWn2XiARHLmHp00" + ], + "style_rules": { + "camera": { + "type": "front-facing phone camera", + "angle": "held slightly below face, looking down at lens", + "quality": "phone camera in low light, slight noise and grain, not DSLR sharp", + "focus": "face in focus, background slightly soft but not artificially blurred" + }, + "environment": { + "feel": "real, uncontrolled, wherever they happen to be", + "lighting": "soft, dim purple glow from a desk lamp or LED strip — barely illuminating the room, heavy shadows, most of the frame is dark, only the face catches light", + "backgrounds": "real lived-in bedroom — unmade bed, plain walls, ceiling vents, clutter, nothing curated or staged", + "avoid": "clean renders, perfect symmetry, stock-photo rooms, AI-looking environments, smooth surfaces, studio backdrops" + }, + "subject": { + "expression": "deadpan, slightly bored, vulnerable, not smiling for the camera", + "pose": "casual — hand in hair, hood up, slouched, not posed or performative", + "clothing": "oversized hoodie, sweater, or dark casual top", + "framing": "head and shoulders, close crop, face takes up most of the frame" + }, + "realism": { + "priority": "the image must look like a real phone photo, not AI-generated", + "texture": "grainy, slightly noisy, imperfect skin texture visible", + "imperfections": "messy hair, wrinkled fabric, uneven lighting, random objects in background", + "avoid": "smooth skin, perfect hair, symmetrical composition, clean backgrounds, hyper-sharp detail, uncanny valley" + } + } + }, + "video": { + "moods": [ + "numb, checked out, staring 
through the camera not at it", + "melancholy, like they just remembered something they were trying to forget", + "quietly amused, like they heard a joke only they understand", + "vulnerable, guard is down, too tired to pretend", + "bored but in a way thats almost peaceful", + "restless, like they want to say something but wont", + "defiant, calm anger, daring you to say something", + "lonely but pretending theyre fine", + "soft, gentle, like theyre about to whisper a secret", + "dissociating, physically present but mentally somewhere else" + ], + "movements": [ + "nearly still, only natural breathing", + "the very corner of their mouth barely lifts into the faintest smirk", + "eyes slowly drift up and to the side like thinking about something", + "very slowly tilts head slightly to one side", + "trying to stay deadpan but fighting a smile, lips press together", + "slow quiet exhale through nose, shoulders drop slightly", + "glances away from camera for a moment then slowly looks back", + "jaw tightens slightly like holding something in", + "one eyebrow raises just barely, like a silent question", + "chest rises and falls in one visible sigh" + ] + }, + "caption": { + "guide": { + "templateStyle": "deadpan selfie with music playing — artist staring at camera, too cool to care", + "captionRole": "the caption is the hook. short, blunt, makes someone stop scrolling. inspired by the songs vibe, not a quote from it.", + "tone": "deadpan, low effort, like you typed it with one thumb while bored", + "rules": [ + "lowercase only", + "SHORTER IS ALWAYS BETTER. aim for 20-50 characters. never exceed 60", + "no punctuation at the end", + "no apostrophes or quotes — write whats up not what's up, write dont not don't", + "never promotional", + "never describe whats in the video", + "dont quote the lyrics directly — riff on the vibe instead", + "dont try to be clever or poetic. be blunt and simple", + "if it sounds like an AI wrote it, its too long and too try-hard. 
simplify", + "think: what would a bored teenager type as a caption in 3 seconds" + ], + "formats": [ + "a blunt confession (6-10 words max)", + "a 'date idea:' or 'pov:' setup (keep it short)", + "a self-deprecating one-liner", + "a hyper-specific relatable moment in as few words as possible", + "something dumb that somehow hits hard" + ] + }, + "examples": [ + "i still keep our photos in the hidden folder in my camera roll in case you come back to me", + "im touring 14 cities in north america this summer (im just looking for the girl i wrote my songs abt cause she wont text me back)", + "date idea: we erase our past and fall back in love so i can unwrite this song", + "if anyone couldve saved me", + "this came out 8 months ago and caroline still hasnt texted me back", + "its always imy and never islfyiebinfy" + ] + }, + "edit": { + "operations": [ + { "type": "crop", "aspect": "9:16" }, + { "type": "overlay_text", "color": "white", "stroke_color": "black", "position": "bottom", "max_font_size": 42 }, + { "type": "mux_audio", "replace": true } + ] + } +} diff --git a/lib/content/templates/artist-caption-outside.json b/lib/content/templates/artist-caption-outside.json new file mode 100644 index 00000000..05d6fed2 --- /dev/null +++ b/lib/content/templates/artist-caption-outside.json @@ -0,0 +1,123 @@ +{ + "id": "artist-caption-outside", + "description": "Night street scene. Artist on camera, phone-on-ground angle, urban cinematic feel. Confident short captions. Vertical 9:16 video, 8 seconds. Best for: confident tracks, urban energy, night vibes. Requires: face image, audio.", + "image": { + "prompt": "A person standing outside at night, phone propped on the ground filming them. Low angle, full body shot. Street lights and city glow. 
Real phone footage feel, slightly shaky framing.", + "reference_images": [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.xV77akF4oFtZGjCkn1roI9M9vPGE96Ux_ZvT5wWgEKA", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.EljTa5aA6egBf4KXPFCjwsZojOZ7S9QgOEyIiH9HjKE", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wMy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.pi6r-0q6cxRwbYMso0h5LtacMonbcEUJYtuLoOJdWdU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNC5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.uCQaIDaLv2YM7wMf-6LnfJh3r_A8pu-7i3FNjuQHRUs", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNS5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.EA3lTITRof9pSUJ3KxzK9ZgYEIsWkGXPcPMSCGDVfHg", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNi5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.qXjexkFDzRrPvYso-_WJUH66No1PXUzNow7jdEw04cc", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wNy5wbmciLCJpYXQiOjE3NzUxODUwNTQsImV4cCI6MTgwNjcyMTA1NH0.2oExeNxOGr7KEEo5zWThgZWaZhJnnooPWsXj6Gp_4jU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wOC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.LDCXObRzgYJSPs4IoXtY9pinb1gCO1iVgb9-uX-JMv8", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0wOS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.WD5xCYsI3klZHS2cVsrXW6T_x7bdVku22EdD7qkazDs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.VfN889NyKAPLKDT6IQVTRzLH4_cegNUGuX3P3bN4oy4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-11.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Z1IQGbIeKombxFIAO-Y2YqYF1s8MBsggx1JR1_oFshM", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-12.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMi5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Ch498MgcnLZcUOAESkbwulqS30ZJn5cL0sCLknsB8es", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-13.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xMy5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.Euiy_gmg3dXaafDS1MCm_IGV3SDvyOmWUja13SffxqQ", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-14.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNC5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.RvaxLUBmArSzTjDAzOcSpF3VUfxPIBw98nmNt5f2zjU", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-15.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNS5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.UA30E9V-f-euLuAlWyFKt6zoR7J9BAfUdOzuz7-gNJY", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-outside/ref-16.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1vdXRzaWRlL3JlZi0xNi5wbmciLCJpYXQiOjE3NzUxODUwNTUsImV4cCI6MTgwNjcyMTA1NX0.s6kmLCjl87FSBGbQ25fGr41YsWndLgot-Spc01WLYxo" + ], + "style_rules": { + "camera": { + "type": "iPhone propped on the sidewalk, recording video", + "angle": "very low, ground level, looking up at the person. 
Slightly tilted because the phone is leaning against something", + "quality": "iPhone night mode video — auto-exposure pumping, digital noise everywhere, slight purple fringing on bright lights, compressed quality" + }, + "environment": { + "feel": "somewhere outside at night, wherever they happen to be — doesn't matter where", + "lighting": "whatever light sources are nearby — street lamps, porch lights, car headlights, neon signs, gas station lights. Uneven, one-directional, casting harsh shadows. Not controlled.", + "backgrounds": "real places — parking lot, sidewalk, driveway, park, alley, outside a store, under a street light, by a fence. Blurry background details, messy and unplanned.", + "avoid": "daytime, even lighting, clean or curated backgrounds, professional photography, perfectly exposed, obviously staged locations" + }, + "subject": { + "expression": "deadpan, unbothered, too cool to care about the camera", + "pose": "full body, standing naturally, weight on one leg, hands in pockets or at sides, not posing", + "clothing": "dark oversized hoodie or jacket, baggy jeans or cargo pants, dark shoes", + "framing": "full body visible head to toe, person takes up about 50-60% of the frame height, space around them, ground visible at bottom" + }, + "realism": { + "priority": "MUST look like a real iPhone video screenshot, not AI. 
if it looks clean or polished it has failed", + "texture": "heavy digital noise in all dark areas, visible JPEG artifacts, color banding in the sky, slight motion blur on any movement", + "imperfections": "lens flare streaking across frame from street lights, blown out highlights that are pure white, slightly warm color cast from sodium lamps, the ground has texture and cracks, shadows are noisy not smooth", + "avoid": "clean noise-free images, perfect skin, sharp focus on everything, symmetrical composition, studio quality, smooth gradients, any sign of AI generation, evenly lit scenes" + } + } + }, + "video": { + "moods": [ + "numb, checked out, staring through the camera not at it", + "melancholy, like they just remembered something they were trying to forget", + "quietly amused, like they heard a joke only they understand", + "vulnerable, guard is down, too tired to pretend", + "bored but in a way that's almost peaceful", + "restless, like they want to say something but won't", + "defiant, calm anger, daring you to say something", + "lonely but pretending they're fine", + "soft, gentle, like they're about to whisper a secret", + "dissociating, physically present but mentally somewhere else" + ], + "movements": [ + "standing still with hands in pockets, staring at the camera", + "slowly turns around so their back faces the camera", + "looks down at the ground and kicks at it with their shoe", + "does a small shrug like whatever", + "blows a bubble with gum", + "slowly puts their hood up", + "does a slow lazy spin", + "waves at the camera sarcastically", + "starts to walk away, stops, looks back", + "sits down on the ground cross legged", + "leans against a wall with arms crossed", + "throws up a peace sign without changing expression", + "tosses something small in the air and catches it", + "mouths the words to the song playing", + "zones out looking up at the sky", + "pulls out phone, looks at it, puts it back" + ] + }, + "caption": { + "guide": { + 
"templateStyle": "deadpan selfie with music playing — artist staring at camera, too cool to care", + "captionRole": "the caption is the hook. short, blunt, makes someone stop scrolling. inspired by the songs vibe, not a quote from it.", + "tone": "deadpan, low effort, like you typed it with one thumb while bored", + "rules": [ + "lowercase only", + "SHORTER IS ALWAYS BETTER. aim for 20-50 characters. never exceed 60", + "no punctuation at the end", + "no apostrophes or quotes — write whats up not what's up, write dont not don't", + "never promotional", + "never describe whats in the video", + "dont quote the lyrics directly — riff on the vibe instead", + "dont try to be clever or poetic. be blunt and simple", + "if it sounds like an AI wrote it, its too long and too try-hard. simplify", + "think: what would a bored teenager type as a caption in 3 seconds" + ], + "formats": [ + "a blunt confession (6-10 words max)", + "a 'date idea:' or 'pov:' setup (keep it short)", + "a self-deprecating one-liner", + "a hyper-specific relatable moment in as few words as possible", + "something dumb that somehow hits hard" + ] + }, + "examples": [ + "i still keep our photos in the hidden folder in my camera roll in case you come back to me", + "i'm touring 14 cities in north america this summer (i'm just looking for the girl i wrote my songs abt cause she won't text me back)", + "date idea: we erase our past and fall back in love so i can unwrite this song", + "if anyone could've saved me", + "this came out 8 months ago and caroline still hasn't texted me back", + "it's always 'imy' and never 'islfyiebinfy'" + ] + }, + "edit": { + "operations": [ + { "type": "crop", "aspect": "9:16" }, + { "type": "overlay_text", "color": "white", "stroke_color": "black", "position": "bottom", "max_font_size": 42 }, + { "type": "mux_audio", "replace": true } + ] + } +} diff --git a/lib/content/templates/artist-caption-stage.json b/lib/content/templates/artist-caption-stage.json new file mode 100644 
index 00000000..385f67b5 --- /dev/null +++ b/lib/content/templates/artist-caption-stage.json @@ -0,0 +1,87 @@ +{ + "id": "artist-caption-stage", + "description": "Small venue fan cam. Artist on camera from crowd perspective, performance energy. Hype short captions. Vertical 9:16 video, 8 seconds. Best for: upbeat songs, live feel, hype moments. Requires: face image, audio.", + "image": { + "prompt": "A person performing on a small stage at a live show. Fan cam perspective — phone held up in the crowd. Stage lights, slightly blurry, not professional photography.", + "reference_images": [ + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-01.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDEucG5nIiwiaWF0IjoxNzc1MTg1MDU1LCJleHAiOjE4MDY3MjEwNTV9.Ff9Olh-7AH9hpGsnoNjm137i_z5QasP6W6fkd7UgXHs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-02.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDIucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.5h8pm3f3ns8UOpRII5klLBY6hjyNKc4eln-y2RhOoZw", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-03.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDMucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.Zth40VhNl3aV-IXcRdNrVpJxfDnG9OX8d0lhd3iYUW8", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-04.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDQucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.SVMtgCM9TJ0DEJPB6mXfhu6lLI5ttjpCNNUmyntToTs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-05.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDUucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.zOthD-7e3-TrRbwygF9ydyAJnycli6ewj8sd_xpHYBs", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-06.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDYucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.4NYpj1wRqwFLf5i_k_vrw8CSg6tTf_kkvaIafwbTfdw", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-07.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDcucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9._4ytmg9RN6SR_M6Eo0mNc_kYG5XkCPKp50ApqMg6qq4", + 
"https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-08.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDgucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.QI2pPs1lDDOHN-BqeSjNm8Fu0TJJwOagcDKCXyb1AqQ", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-09.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMDkucG5nIiwiaWF0IjoxNzc1MTg1MDU2LCJleHAiOjE4MDY3MjEwNTZ9.rDvcjb4DhlC8w7ehpgvL8x7PScPfiQaUQg56vpIIy-4", + "https://godremdqwajrwazhbrue.supabase.co/storage/v1/object/sign/user-files/templates/artist-caption-stage/ref-10.png?token=eyJraWQiOiJzdG9yYWdlLXVybC1zaWduaW5nLWtleV84MzUzZTIyMy04YWU5LTQxMDYtOWZiYi04Y2NhMjE3NDc5YWUiLCJhbGciOiJIUzI1NiJ9.eyJ1cmwiOiJ1c2VyLWZpbGVzL3RlbXBsYXRlcy9hcnRpc3QtY2FwdGlvbi1zdGFnZS9yZWYtMTAucG5nIiwiaWF0IjoxNzc1MTg1MDU3LCJleHAiOjE4MDY3MjEwNTd9.oQ4VKoltTJJPSQMfJ8E0mEh1mtDXN0JigntzoIhmPo8" + ], + "style_rules": { + "camera": { + "type": "iPhone held up in a crowd recording a concert", + "angle": "slightly below stage level, looking up at performer, not perfectly centered", + "quality": "iPhone video screenshot quality — compressed, noisy, not sharp. Digital noise in dark areas. Slight purple fringing on highlights." + }, + "environment": { + "feel": "cramped small venue, sweaty, dark, someone's phone screen glowing in the corner", + "lighting": "harsh stage spots from above — blown out orange and red highlights, deep black shadows, face half in darkness. 
Light spill is uneven and messy.", + "backgrounds": "out of focus crowd silhouettes, blurry stage equipment, maybe a phone screen or two glowing in the audience, exit sign in the distance", + "avoid": "even lighting, clean backgrounds, arena-sized venues, professional concert photography, perfectly exposed images, visible detail in dark areas" + }, + "subject": { + "expression": "mid-performance — eyes closed singing, chin up, lost in the music", + "pose": "holding mic close, one hand up, or gripping mic stand, slightly blurry from movement", + "clothing": "dark — black hoodie, dark jacket, nothing bright or styled", + "framing": "not perfectly framed — subject slightly off center, maybe someone's head partially blocking the bottom, cropped awkwardly like a real phone photo" + }, + "realism": { + "priority": "this MUST look like a screenshot from someone's iPhone concert video, not a professional photo or AI image", + "texture": "heavy digital noise in shadows, JPEG compression artifacts, slight color banding in gradients, skin has no retouching", + "imperfections": "lens flare bleeding across frame, blown out stage light spots that are pure white, someone's hand or phone slightly visible at edge of frame, chromatic aberration on bright lights, slight motion blur on performer's hands", + "avoid": "clean noise-free images, perfect skin, sharp focus on everything, symmetrical composition, studio quality, any sign of AI generation" + } + } + }, + "video": { + "moods": [], + "movements": [] + }, + "caption": { + "guide": { + "templateStyle": "live performance with emotional or lyric caption — the artist on stage with words that hit", + "captionRole": "the caption adds emotional weight to the image. 
it can be a lyric, a question, a confession, or a thought that makes the viewer feel something while looking at the performance", + "tone": "raw, emotional, vulnerable, poetic — like the artist is speaking directly to one person in the crowd", + "rules": [ + "lowercase only", + "max 100 characters (can be longer than casual template since its more emotional)", + "apostrophes are allowed (im, youre, dont all ok — but also i'm, you're, don't all ok)", + "question marks are allowed", + "never promotional", + "never describe what's in the image", + "can be a direct lyric quote from the song", + "can be a rhetorical question", + "should feel like the artist is saying it mid-performance" + ], + "formats": [ + "a lyric line that hits hardest out of context", + "a rhetorical question directed at someone specific", + "a confession that feels too honest for a stage", + "a one-line gut punch", + "something that makes you screenshot and send to someone" + ] + }, + "examples": [ + "how can you look at me and pretend i'm someone you've never met?", + "i wrote this song about you and you don't even know", + "every time i sing this part i think about leaving", + "this is the last song i'll ever write about you", + "i hope you hear this and it ruins your whole night" + ] + }, + "edit": { + "operations": [ + { "type": "crop", "aspect": "9:16" }, + { "type": "overlay_text", "color": "white", "stroke_color": "black", "position": "bottom", "max_font_size": 42 }, + { "type": "mux_audio", "replace": true } + ] + } +} diff --git a/lib/content/templates/index.ts b/lib/content/templates/index.ts new file mode 100644 index 00000000..2650d17c --- /dev/null +++ b/lib/content/templates/index.ts @@ -0,0 +1,65 @@ +import bedroomTemplate from "./artist-caption-bedroom.json"; +import outsideTemplate from "./artist-caption-outside.json"; +import stageTemplate from "./artist-caption-stage.json"; +import recordStoreTemplate from "./album-record-store.json"; + +export interface TemplateEditOperation { 
+ type: string; + [key: string]: unknown; +} + +export interface Template { + id: string; + description: string; + image: { + prompt: string; + reference_images: string[]; + style_rules: Record>; + }; + video: { + moods: string[]; + movements: string[]; + }; + caption: { + guide: { + templateStyle?: string; + captionRole?: string; + tone: string; + rules: string[]; + formats: string[]; + }; + examples: string[]; + }; + edit: { + operations: TemplateEditOperation[]; + }; +} + +const TEMPLATES: Record = { + "artist-caption-bedroom": bedroomTemplate as unknown as Template, + "artist-caption-outside": outsideTemplate as unknown as Template, + "artist-caption-stage": stageTemplate as unknown as Template, + "album-record-store": recordStoreTemplate as unknown as Template, +}; + +/** + * Load a template by ID. Returns null if not found. + * + * @param id - Template identifier. + * @returns The full template config, or null. + */ +export function loadTemplate(id: string): Template | null { + return TEMPLATES[id] ?? null; +} + +/** + * List all available templates with id and description only. + * + * @returns Array of template summaries. 
+ */ +export function listTemplates(): { id: string; description: string }[] { + return Object.values(TEMPLATES).map(t => ({ + id: t.id, + description: t.description, + })); +} diff --git a/lib/content/validateCreateContentBody.ts b/lib/content/validateCreateContentBody.ts index 80049ce1..dc303f09 100644 --- a/lib/content/validateCreateContentBody.ts +++ b/lib/content/validateCreateContentBody.ts @@ -4,10 +4,7 @@ import { z } from "zod"; import { getCorsHeaders } from "@/lib/networking/getCorsHeaders"; import { safeParseJson } from "@/lib/networking/safeParseJson"; import { validateAuthContext } from "@/lib/auth/validateAuthContext"; -import { - DEFAULT_CONTENT_TEMPLATE, - isSupportedContentTemplate, -} from "@/lib/content/contentTemplates"; +import { isSupportedContentTemplate } from "@/lib/content/contentTemplates"; import { resolveArtistSlug } from "@/lib/content/resolveArtistSlug"; import { songsSchema } from "@/lib/content/songsSchema"; @@ -17,11 +14,7 @@ export const createContentBodySchema = z.object({ artist_account_id: z .string({ message: "artist_account_id is required" }) .uuid("artist_account_id must be a valid UUID"), - template: z - .string() - .min(1, "template cannot be empty") - .optional() - .default(DEFAULT_CONTENT_TEMPLATE), + template: z.string().min(1, "template cannot be empty").optional(), lipsync: z.boolean().optional().default(false), caption_length: z.enum(CAPTION_LENGTHS).optional().default("short"), upscale: z.boolean().optional().default(false), @@ -34,7 +27,7 @@ export type ValidatedCreateContentBody = { accountId: string; artistAccountId: string; artistSlug: string; - template: string; + template: string | undefined; lipsync: boolean; captionLength: "short" | "medium" | "long"; upscale: boolean; @@ -72,8 +65,8 @@ export async function validateCreateContentBody( return authResult; } - const template = result.data.template ?? 
DEFAULT_CONTENT_TEMPLATE; - if (!isSupportedContentTemplate(template)) { + const template = result.data.template; + if (template && !isSupportedContentTemplate(template)) { return NextResponse.json( { status: "error", diff --git a/lib/content/validateGetContentEstimateQuery.ts b/lib/content/validateGetContentEstimateQuery.ts index 5828e7cc..97af7468 100644 --- a/lib/content/validateGetContentEstimateQuery.ts +++ b/lib/content/validateGetContentEstimateQuery.ts @@ -15,6 +15,8 @@ export type ValidatedGetContentEstimateQuery = z.infer { diff --git a/lib/evals/callChatFunctionsWithResult.ts b/lib/evals/callChatFunctionsWithResult.ts index a792248b..b80fcb58 100644 --- a/lib/evals/callChatFunctionsWithResult.ts +++ b/lib/evals/callChatFunctionsWithResult.ts @@ -8,6 +8,8 @@ import { ChatRequestBody } from "@/lib/chat/validateChatRequest"; * * Note: result.toolCalls only contains calls from the LAST step. When using multi-step * tool chains, we need to collect toolCalls from result.steps to capture all tool usage. + * + * @param input */ export async function callChatFunctionsWithResult(input: string) { const messages: UIMessage[] = [ diff --git a/lib/evals/createToolsCalledScorer.ts b/lib/evals/createToolsCalledScorer.ts index 1d838ee3..8a9ac7e7 100644 --- a/lib/evals/createToolsCalledScorer.ts +++ b/lib/evals/createToolsCalledScorer.ts @@ -3,6 +3,9 @@ import { ToolsCalled } from "./scorers/ToolsCalled"; /** * Creates a scorer that checks if required tools were called. * Handles extracting output text and toolCalls from the task result. 
+ * + * @param requiredTools + * @param penalizedTools */ export const createToolsCalledScorer = (requiredTools: string[], penalizedTools: string[] = []) => { return async (args: { output: unknown; expected?: string; input: string }) => { diff --git a/lib/evals/extractTextFromResult.ts b/lib/evals/extractTextFromResult.ts index fac24cf6..dc67f3ab 100644 --- a/lib/evals/extractTextFromResult.ts +++ b/lib/evals/extractTextFromResult.ts @@ -3,6 +3,8 @@ import { extractTextResultFromSteps } from "./extractTextResultFromSteps"; /** * Extract text from a GenerateTextResult + * + * @param result */ export function extractTextFromResult(result: Awaited>): string { // Handle multi-step responses (when maxSteps > 1) diff --git a/lib/evals/extractTextResultFromSteps.ts b/lib/evals/extractTextResultFromSteps.ts index 44c0ae0d..16881677 100644 --- a/lib/evals/extractTextResultFromSteps.ts +++ b/lib/evals/extractTextResultFromSteps.ts @@ -4,6 +4,8 @@ import type { TextPart } from "ai"; /** * Extract text from multi-step GenerateTextResult * Handles responses where maxSteps > 1 + * + * @param result */ export function extractTextResultFromSteps( result: Awaited>, diff --git a/lib/evals/getCatalogSongsCountExpected.ts b/lib/evals/getCatalogSongsCountExpected.ts index 6f04e59c..d94383ef 100644 --- a/lib/evals/getCatalogSongsCountExpected.ts +++ b/lib/evals/getCatalogSongsCountExpected.ts @@ -2,6 +2,9 @@ import { getCatalogs } from "@/lib/catalog/getCatalogs"; import { getCatalogSongs } from "@/lib/catalog/getCatalogSongs"; import { EVAL_ACCOUNT_ID } from "@/lib/consts"; +/** + * + */ async function getCatalogSongsCountExpected() { try { const catalogsData = await getCatalogs(EVAL_ACCOUNT_ID); diff --git a/lib/evals/getSpotifyFollowersExpected.ts b/lib/evals/getSpotifyFollowersExpected.ts index ef96e248..f5221937 100644 --- a/lib/evals/getSpotifyFollowersExpected.ts +++ b/lib/evals/getSpotifyFollowersExpected.ts @@ -1,5 +1,9 @@ import { getSpotifyFollowers } from 
"@/lib/spotify/getSpotifyFollowers"; +/** + * + * @param artist + */ async function getSpotifyFollowersExpected(artist: string) { try { const followerCount = await getSpotifyFollowers(artist); diff --git a/lib/evals/scorers/CatalogAvailability.ts b/lib/evals/scorers/CatalogAvailability.ts index f4829ea4..8cf292d9 100644 --- a/lib/evals/scorers/CatalogAvailability.ts +++ b/lib/evals/scorers/CatalogAvailability.ts @@ -5,6 +5,11 @@ import { z } from "zod"; /** * Custom scorer that uses AI to check if recommended songs are actually in the catalog + * + * @param root0 + * @param root0.output + * @param root0.expected + * @param root0.input */ export const CatalogAvailability = async ({ output, diff --git a/lib/evals/scorers/QuestionAnswered.ts b/lib/evals/scorers/QuestionAnswered.ts index abe0222c..a7bafd1d 100644 --- a/lib/evals/scorers/QuestionAnswered.ts +++ b/lib/evals/scorers/QuestionAnswered.ts @@ -5,6 +5,11 @@ import { z } from "zod"; /** * Custom scorer that checks if the AI actually answered the customer's question * with a specific answer, or if it deflected/explained why it couldn't answer + * + * @param root0 + * @param root0.output + * @param root0.expected + * @param root0.input */ export const QuestionAnswered = async ({ output, diff --git a/lib/evals/scorers/ToolsCalled.ts b/lib/evals/scorers/ToolsCalled.ts index 2d901ec3..6a451100 100644 --- a/lib/evals/scorers/ToolsCalled.ts +++ b/lib/evals/scorers/ToolsCalled.ts @@ -1,5 +1,13 @@ /** * Generic scorer that checks if specific tools were called + * + * @param root0 + * @param root0.output + * @param root0.expected + * @param root0.input + * @param root0.toolCalls + * @param root0.requiredTools + * @param root0.penalizedTools */ export const ToolsCalled = async ({ toolCalls, diff --git a/lib/flamingo/__tests__/getFlamingoPresetsHandler.test.ts b/lib/flamingo/__tests__/getFlamingoPresetsHandler.test.ts index 19109b2d..1c30d8fc 100644 --- a/lib/flamingo/__tests__/getFlamingoPresetsHandler.test.ts +++ 
b/lib/flamingo/__tests__/getFlamingoPresetsHandler.test.ts @@ -17,6 +17,9 @@ vi.mock("../presets", () => ({ getPresetSummaries: vi.fn(), })); +/** + * + */ function createMockRequest(): NextRequest { return { headers: new Headers({ "x-api-key": "test-key" }), diff --git a/lib/flamingo/getFlamingoPresetsHandler.ts b/lib/flamingo/getFlamingoPresetsHandler.ts index e35b5899..f33d491d 100644 --- a/lib/flamingo/getFlamingoPresetsHandler.ts +++ b/lib/flamingo/getFlamingoPresetsHandler.ts @@ -10,6 +10,7 @@ import { validateAuthContext } from "@/lib/auth/validateAuthContext"; * Returns a list of all available analysis presets. * Requires authentication via x-api-key header or Authorization bearer token. * + * @param request * @returns A NextResponse with the list of available presets. */ export async function getFlamingoPresetsHandler(request: NextRequest): Promise { diff --git a/lib/github/__tests__/createOrUpdateFileContent.test.ts b/lib/github/__tests__/createOrUpdateFileContent.test.ts index 8e2a19a1..f8fee1a1 100644 --- a/lib/github/__tests__/createOrUpdateFileContent.test.ts +++ b/lib/github/__tests__/createOrUpdateFileContent.test.ts @@ -1,12 +1,12 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; import { createOrUpdateFileContent } from "../createOrUpdateFileContent"; +import { parseGitHubRepoUrl } from "../parseGitHubRepoUrl"; + vi.mock("../parseGitHubRepoUrl", () => ({ parseGitHubRepoUrl: vi.fn(), })); -import { parseGitHubRepoUrl } from "../parseGitHubRepoUrl"; - const mockFetch = vi.fn(); global.fetch = mockFetch; diff --git a/lib/github/expandSubmoduleEntries.ts b/lib/github/expandSubmoduleEntries.ts index 9531bee1..3082c63b 100644 --- a/lib/github/expandSubmoduleEntries.ts +++ b/lib/github/expandSubmoduleEntries.ts @@ -11,9 +11,15 @@ interface SubmoduleRef { * Resolves submodule URLs from .gitmodules, fetches each submodule's tree, * and merges the results into the regular entries with correct path prefixes. 
* + * @param regularEntries.regularEntries * @param regularEntries - Non-submodule file tree entries * @param submoduleEntries - Submodule references (type "commit" from GitHub Trees API) * @param repo - Repository context for fetching .gitmodules + * @param regularEntries.submoduleEntries + * @param regularEntries.repo + * @param regularEntries.repo.owner + * @param regularEntries.repo.repo + * @param regularEntries.repo.branch * @returns Combined file tree entries with submodules expanded as directories */ export async function expandSubmoduleEntries({ diff --git a/lib/github/getRepoGitModules.ts b/lib/github/getRepoGitModules.ts index caa0304e..8913a6ae 100644 --- a/lib/github/getRepoGitModules.ts +++ b/lib/github/getRepoGitModules.ts @@ -4,9 +4,12 @@ import { parseGitModules, type SubmoduleEntry } from "./parseGitModules"; * Fetches and parses .gitmodules from a GitHub repository. * Uses the GitHub Contents API (works for both public and private repos). * + * @param owner.owner * @param owner - The GitHub repository owner * @param repo - The GitHub repository name * @param branch - The branch to fetch from + * @param owner.repo + * @param owner.branch * @returns Array of submodule entries, or null if .gitmodules doesn't exist or fetch fails */ export async function getRepoGitModules({ diff --git a/lib/github/resolveSubmodulePath.ts b/lib/github/resolveSubmodulePath.ts index 7c3f60ed..029f1b1d 100644 --- a/lib/github/resolveSubmodulePath.ts +++ b/lib/github/resolveSubmodulePath.ts @@ -6,8 +6,10 @@ import { getRepoGitModules } from "./getRepoGitModules"; * If the path falls within a submodule, returns the submodule's repo URL * and the relative path within it. Otherwise returns the original values. 
* + * @param githubRepo.githubRepo * @param githubRepo - The parent GitHub repository URL * @param path - The file path to resolve + * @param githubRepo.path * @returns The resolved repo URL and path */ export async function resolveSubmodulePath({ diff --git a/lib/mcp/resolveAccountId.ts b/lib/mcp/resolveAccountId.ts index 03d1d0d8..456fe4c6 100644 --- a/lib/mcp/resolveAccountId.ts +++ b/lib/mcp/resolveAccountId.ts @@ -16,6 +16,8 @@ export interface ResolveAccountIdResult { * Validates access when an org API key attempts to use an account_id override. * * @param params - The auth info and optional account_id override. + * @param params.authInfo + * @param params.accountIdOverride * @returns The resolved accountId or an error message. */ export async function resolveAccountId({ diff --git a/lib/mcp/tools/content/callContentEndpoint.ts b/lib/mcp/tools/content/callContentEndpoint.ts new file mode 100644 index 00000000..2ed44f0e --- /dev/null +++ b/lib/mcp/tools/content/callContentEndpoint.ts @@ -0,0 +1,45 @@ +import { resolveAccountId } from "@/lib/mcp/resolveAccountId"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; + +const API_BASE = process.env.RECOUP_API_URL || "https://recoup-api.vercel.app"; + +/** + * Proxies a request to a content API endpoint, forwarding the caller's API key. + * Keeps MCP tools DRY by reusing the existing REST handlers for auth + business logic. + * + * @param path - API path starting with "/api/..." (e.g. "/api/content/image"). + * @param method - HTTP method. + * @param body - JSON body to send (omit for GET). + * @param authInfo - MCP auth info from the request context. + * @returns Parsed response data or an error string. 
+ */ +export async function callContentEndpoint( + path: string, + method: "GET" | "POST" | "PATCH", + body: Record | undefined, + authInfo: McpAuthInfo | undefined, +): Promise<{ data?: unknown; error?: string }> { + const { accountId, error } = await resolveAccountId({ + authInfo, + accountIdOverride: undefined, + }); + if (error) return { error }; + if (!accountId) return { error: "Authentication required." }; + + const apiKey = authInfo?.token; + if (!apiKey) return { error: "API key required." }; + + const url = `${API_BASE}${path}`; + const res = await fetch(url, { + method, + headers: { + "Content-Type": "application/json", + "x-api-key": apiKey, + }, + ...(body ? { body: JSON.stringify(body) } : {}), + }); + + const data = await res.json(); + if (!res.ok) return { error: data.error || `Request failed: ${res.status}` }; + return { data }; +} diff --git a/lib/mcp/tools/content/index.ts b/lib/mcp/tools/content/index.ts new file mode 100644 index 00000000..7e489e74 --- /dev/null +++ b/lib/mcp/tools/content/index.ts @@ -0,0 +1,27 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import { registerGenerateContentImageTool } from "./registerGenerateContentImageTool"; +import { registerGenerateContentVideoTool } from "./registerGenerateContentVideoTool"; +import { registerGenerateContentCaptionTool } from "./registerGenerateContentCaptionTool"; +import { registerTranscribeContentAudioTool } from "./registerTranscribeContentAudioTool"; +import { registerEditContentTool } from "./registerEditContentTool"; +import { registerUpscaleContentTool } from "./registerUpscaleContentTool"; +import { registerAnalyzeContentVideoTool } from "./registerAnalyzeContentVideoTool"; +import { registerListContentTemplatesTool } from "./registerListContentTemplatesTool"; +import { registerCreateContentTool } from "./registerCreateContentTool"; + +/** + * Registers all content-creation MCP tools on the server. 
+ * + * @param server - The MCP server instance to register tools on. + */ +export const registerAllContentTools = (server: McpServer): void => { + registerGenerateContentImageTool(server); + registerGenerateContentVideoTool(server); + registerGenerateContentCaptionTool(server); + registerTranscribeContentAudioTool(server); + registerEditContentTool(server); + registerUpscaleContentTool(server); + registerAnalyzeContentVideoTool(server); + registerListContentTemplatesTool(server); + registerCreateContentTool(server); +}; diff --git a/lib/mcp/tools/content/registerAnalyzeContentVideoTool.ts b/lib/mcp/tools/content/registerAnalyzeContentVideoTool.ts new file mode 100644 index 00000000..c9c287e6 --- /dev/null +++ b/lib/mcp/tools/content/registerAnalyzeContentVideoTool.ts @@ -0,0 +1,66 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + video_url: z.string().url().describe("URL of the video to analyze."), + prompt: z + .string() + .min(1) + .max(2000) + .describe( + "Question or instruction for the analysis (e.g. 'Describe all scenes', 'Count the number of people').", + ), + temperature: z + .number() + .min(0) + .max(1) + .optional() + .describe( + "Sampling temperature for the AI response (0-1). Lower = more deterministic. Defaults to 0.2.", + ), + max_tokens: z + .number() + .int() + .min(1) + .max(4096) + .optional() + .describe("Maximum tokens in the response."), +}); + +/** + * Registers the "analyze_content_video" tool on the MCP server. 
+ * + * @param server - The MCP server instance. + */ +export function registerAnalyzeContentVideoTool(server: McpServer): void { + server.registerTool( + "analyze_content_video", + { + description: + "Analyze a video with AI. Describe scenes, check quality, count subjects, " + + "evaluate for social media — ask anything about the video.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/analyze", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerCreateContentTool.ts b/lib/mcp/tools/content/registerCreateContentTool.ts new file mode 100644 index 00000000..d26f5148 --- /dev/null +++ b/lib/mcp/tools/content/registerCreateContentTool.ts @@ -0,0 +1,73 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + artist_account_id: z + .string() + .uuid() + .describe( + "The artist's account ID (UUID). This is the target artist, not the caller's account.", + ), + template: z + .string() + .optional() + .describe("Template ID for the content pipeline (use list_content_templates to see options)."), + lipsync: z + .boolean() + .optional() + .describe("Enable lipsync mode for the video step. 
Defaults to false."), + caption_length: z + .enum(["short", "medium", "long"]) + .optional() + .describe("Length of the generated caption. Defaults to 'short'."), + batch: z + .number() + .int() + .min(1) + .max(30) + .optional() + .describe("Number of content pieces to generate in parallel (1-30). Defaults to 1."), + songs: z + .array(z.string()) + .optional() + .describe("Array of song URLs or identifiers to use in content creation."), +}); + +/** + * Registers the "create_content" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerCreateContentTool(server: McpServer): void { + server.registerTool( + "create_content", + { + description: + "Run the full content creation pipeline in one call. " + + "Generates image, video, caption, and edit for an artist. " + + "Returns background task run IDs.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/create", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerEditContentTool.ts b/lib/mcp/tools/content/registerEditContentTool.ts new file mode 100644 index 00000000..49c8448a --- /dev/null +++ b/lib/mcp/tools/content/registerEditContentTool.ts @@ -0,0 +1,109 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from 
"./callContentEndpoint"; + +const operationSchema = z.discriminatedUnion("type", [ + z.object({ + type: z.literal("trim"), + start: z.number().nonnegative().describe("Start time in seconds."), + duration: z.number().positive().describe("Duration in seconds."), + }), + z.object({ + type: z.literal("crop"), + aspect: z.string().optional().describe("Target aspect ratio string (e.g. '16:9')."), + width: z.number().int().positive().optional().describe("Target width in pixels."), + height: z.number().int().positive().optional().describe("Target height in pixels."), + }), + z.object({ + type: z.literal("resize"), + width: z.number().int().positive().optional().describe("Target width in pixels."), + height: z.number().int().positive().optional().describe("Target height in pixels."), + }), + z.object({ + type: z.literal("overlay_text"), + content: z.string().min(1).describe("Text content to overlay."), + font: z.string().optional().describe("Font name."), + color: z.string().optional().describe("Text color. Defaults to 'white'."), + stroke_color: z.string().optional().describe("Stroke/outline color. Defaults to 'black'."), + max_font_size: z + .number() + .positive() + .optional() + .describe("Maximum font size in pixels. Defaults to 42."), + position: z + .enum(["top", "center", "bottom"]) + .optional() + .describe("Text position on screen. Defaults to 'bottom'."), + }), + z.object({ + type: z.literal("mux_audio"), + audio_url: z.string().url().describe("URL of the audio track to mux in."), + replace: z.boolean().optional().describe("Replace existing audio track. Defaults to true."), + }), +]); + +const inputSchema = z.object({ + video_url: z + .string() + .url() + .optional() + .describe("URL of the video to edit. At least one of video_url or audio_url is required."), + audio_url: z.string().url().optional().describe("URL of the audio to edit."), + template: z + .string() + .optional() + .describe("Template ID for preset edit operations. 
Provide template OR operations."), + operations: z + .array(operationSchema) + .optional() + .describe("Array of edit operations to apply (trim, crop, resize, overlay_text, mux_audio)."), + output_format: z + .enum(["mp4", "webm", "mov"]) + .optional() + .describe("Output format. Defaults to 'mp4'."), +}); + +/** + * Registers the "edit_content" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerEditContentTool(server: McpServer): void { + server.registerTool( + "edit_content", + { + description: + "Edit content — trim, crop, resize, overlay text, or add audio. " + + "Pass a template for preset operations, or specify operations manually. " + + "Returns a background task run ID.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + if (!args.video_url && !args.audio_url) { + return getToolResultError("At least one of 'video_url' or 'audio_url' must be provided."); + } + if (!args.template && (!args.operations || args.operations.length === 0)) { + return getToolResultError("Provide either 'template' or 'operations'."); + } + + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content", + "PATCH", + args as unknown as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerGenerateContentCaptionTool.ts b/lib/mcp/tools/content/registerGenerateContentCaptionTool.ts new file mode 100644 index 00000000..bd2f8022 --- /dev/null +++ b/lib/mcp/tools/content/registerGenerateContentCaptionTool.ts @@ -0,0 +1,50 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { 
McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + topic: z + .string() + .min(1) + .describe("Subject or theme for the caption (e.g. 'new album drop', 'summer vibes tour')."), + length: z + .enum(["short", "medium", "long"]) + .optional() + .describe("Caption length tier. Defaults to 'short'."), + template: z.string().optional().describe("Template ID for caption style and tone presets."), +}); + +/** + * Registers the "generate_content_caption" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerGenerateContentCaptionTool(server: McpServer): void { + server.registerTool( + "generate_content_caption", + { + description: "Generate an on-screen caption or text overlay for social media content.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/caption", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerGenerateContentImageTool.ts b/lib/mcp/tools/content/registerGenerateContentImageTool.ts new file mode 100644 index 00000000..f8095701 --- /dev/null +++ b/lib/mcp/tools/content/registerGenerateContentImageTool.ts @@ -0,0 +1,94 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess 
} from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + prompt: z + .string() + .optional() + .describe( + "Text prompt describing the image to generate. Required unless template is provided.", + ), + template: z + .string() + .optional() + .describe( + "Template ID for curated visual style presets (use list_content_templates to see options).", + ), + reference_image_url: z + .string() + .url() + .optional() + .describe("URL of a reference image for face/style transfer."), + aspect_ratio: z + .enum([ + "auto", + "21:9", + "16:9", + "3:2", + "4:3", + "5:4", + "1:1", + "4:5", + "3:4", + "2:3", + "9:16", + "4:1", + "1:4", + "8:1", + "1:8", + ]) + .optional() + .describe("Aspect ratio for the generated image. Defaults to 'auto'."), + resolution: z + .enum(["0.5K", "1K", "2K", "4K"]) + .optional() + .describe("Output resolution. Defaults to '1K'."), + num_images: z + .number() + .int() + .min(1) + .max(4) + .optional() + .describe("Number of images to generate (1-4). Defaults to 1."), +}); + +/** + * Registers the "generate_content_image" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerGenerateContentImageTool(server: McpServer): void { + server.registerTool( + "generate_content_image", + { + description: + "Generate an image from a text prompt, optionally using a reference image for face/style transfer. 
Supports templates for curated visual styles.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + if (!args.prompt && !args.template) { + return getToolResultError("At least one of 'prompt' or 'template' must be provided."); + } + + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/image", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerGenerateContentVideoTool.ts b/lib/mcp/tools/content/registerGenerateContentVideoTool.ts new file mode 100644 index 00000000..c45da4b7 --- /dev/null +++ b/lib/mcp/tools/content/registerGenerateContentVideoTool.ts @@ -0,0 +1,75 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + mode: z + .enum(["prompt", "animate", "reference", "extend", "first-last", "lipsync"]) + .optional() + .describe( + "Video generation mode. Auto-inferred from inputs if omitted. 
" + + "'prompt' = text-to-video, 'animate' = image-to-video, 'reference' = style reference, " + + "'extend' = continue a video, 'first-last' = transition between two images, " + + "'lipsync' = sync face to audio.", + ), + prompt: z.string().optional().describe("Text prompt describing the video to generate."), + image_url: z + .string() + .url() + .optional() + .describe("URL of an input image (for animate, reference, first-last, or lipsync modes)."), + end_image_url: z + .string() + .url() + .optional() + .describe("URL of the ending frame image (for first-last mode)."), + video_url: z.string().url().optional().describe("URL of a video to extend (for extend mode)."), + audio_url: z.string().url().optional().describe("URL of audio for lipsync mode."), + template: z.string().optional().describe("Template ID for curated style presets."), + aspect_ratio: z + .enum(["auto", "16:9", "9:16"]) + .optional() + .describe("Aspect ratio for the generated video. Defaults to 'auto'."), + duration: z + .enum(["4s", "6s", "7s", "8s"]) + .optional() + .describe("Video duration. Defaults to '8s'."), +}); + +/** + * Registers the "generate_content_video" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerGenerateContentVideoTool(server: McpServer): void { + server.registerTool( + "generate_content_video", + { + description: + "Generate a video. Supports 6 modes: prompt (text-to-video), animate (image-to-video), " + + "reference (style reference), extend (continue a video), first-last (transition between images), " + + "lipsync (face sync to audio). 
Mode is auto-inferred from inputs if not specified.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/video", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerListContentTemplatesTool.ts b/lib/mcp/tools/content/registerListContentTemplatesTool.ts new file mode 100644 index 00000000..3d711487 --- /dev/null +++ b/lib/mcp/tools/content/registerListContentTemplatesTool.ts @@ -0,0 +1,42 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({}); + +/** + * Registers the "list_content_templates" tool on the MCP server. + * + * @param server - The MCP server instance. + */ +export function registerListContentTemplatesTool(server: McpServer): void { + server.registerTool( + "list_content_templates", + { + description: + "List all available content creation templates. 
" + + "Templates are optional shortcuts — curated creative recipes that pre-fill parameters.", + inputSchema, + }, + async ( + _args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/templates", + "GET", + undefined, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerTranscribeContentAudioTool.ts b/lib/mcp/tools/content/registerTranscribeContentAudioTool.ts new file mode 100644 index 00000000..624dbfe9 --- /dev/null +++ b/lib/mcp/tools/content/registerTranscribeContentAudioTool.ts @@ -0,0 +1,51 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + audio_urls: z.array(z.string().url()).min(1).describe("Array of audio file URLs to transcribe."), + language: z + .string() + .optional() + .describe("Language code for transcription (e.g. 'en', 'es'). Defaults to 'en'."), + chunk_level: z + .enum(["none", "segment", "word"]) + .optional() + .describe("Granularity of timestamp chunks: 'none', 'segment', or 'word'. Defaults to 'word'."), +}); + +/** + * Registers the "transcribe_content_audio" tool on the MCP server. + * + * @param server - The MCP server instance. 
+ */ +export function registerTranscribeContentAudioTool(server: McpServer): void { + server.registerTool( + "transcribe_content_audio", + { + description: + "Transcribe audio to timestamped text. Returns full lyrics and individual word/segment timestamps.", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/transcribe", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/content/registerUpscaleContentTool.ts b/lib/mcp/tools/content/registerUpscaleContentTool.ts new file mode 100644 index 00000000..773a9c74 --- /dev/null +++ b/lib/mcp/tools/content/registerUpscaleContentTool.ts @@ -0,0 +1,53 @@ +import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; +import type { RequestHandlerExtra } from "@modelcontextprotocol/sdk/shared/protocol.js"; +import type { ServerRequest, ServerNotification } from "@modelcontextprotocol/sdk/types.js"; +import { z } from "zod"; +import type { McpAuthInfo } from "@/lib/mcp/verifyApiKey"; +import { getToolResultSuccess } from "@/lib/mcp/getToolResultSuccess"; +import { getToolResultError } from "@/lib/mcp/getToolResultError"; +import { callContentEndpoint } from "./callContentEndpoint"; + +const inputSchema = z.object({ + url: z.string().url().describe("URL of the image or video to upscale."), + type: z.enum(["image", "video"]).describe("Whether the input is an image or video."), + upscale_factor: z + .number() + .min(1) + .max(4) + .optional() + .describe("Upscale multiplier (1-4). Defaults to 2."), + target_resolution: z + .enum(["720p", "1080p", "1440p", "2160p"]) + .optional() + .describe("Target resolution instead of a factor. Overrides upscale_factor when set."), +}); + +/** + * Registers the "upscale_content" tool on the MCP server. 
+ * + * @param server - The MCP server instance. + */ +export function registerUpscaleContentTool(server: McpServer): void { + server.registerTool( + "upscale_content", + { + description: "Upscale an image or video to higher resolution (up to 4x or 4K).", + inputSchema, + }, + async ( + args: z.infer, + extra: RequestHandlerExtra, + ) => { + const authInfo = extra.authInfo as McpAuthInfo | undefined; + const { data, error } = await callContentEndpoint( + "/api/content/upscale", + "POST", + args as Record, + authInfo, + ); + + if (error) return getToolResultError(error); + return getToolResultSuccess(data); + }, + ); +} diff --git a/lib/mcp/tools/index.ts b/lib/mcp/tools/index.ts index e95da17f..2230271b 100644 --- a/lib/mcp/tools/index.ts +++ b/lib/mcp/tools/index.ts @@ -3,6 +3,7 @@ import { registerGetLocalTimeTool } from "./registerGetLocalTimeTool"; import { registerAllTaskTools } from "./tasks"; import { registerAllImageTools } from "./images"; import { registerAllCatalogTools } from "./catalogs"; +import { registerAllContentTools } from "./content"; import { registerAllSora2Tools } from "./sora2"; import { registerAllSpotifyTools } from "./spotify"; import { registerContactTeamTool } from "./registerContactTeamTool"; @@ -35,6 +36,7 @@ export const registerAllTools = (server: McpServer): void => { registerAllArtistTools(server); registerAllArtistSocialsTools(server); registerAllCatalogTools(server); + registerAllContentTools(server); registerAllChatsTools(server); registerAllFileTools(server); registerAllFlamingoTools(server); diff --git a/lib/mcp/tools/transcribe/registerTranscribeAudioTool.ts b/lib/mcp/tools/transcribe/registerTranscribeAudioTool.ts index 4942fdfb..d8a64f79 100644 --- a/lib/mcp/tools/transcribe/registerTranscribeAudioTool.ts +++ b/lib/mcp/tools/transcribe/registerTranscribeAudioTool.ts @@ -15,6 +15,10 @@ const transcribeAudioSchema = z.object({ type TranscribeAudioArgs = z.infer; +/** + * + * @param server + */ export function 
registerTranscribeAudioTool(server: McpServer): void { server.registerTool( "transcribe_audio", diff --git a/lib/notifications/__tests__/createNotificationHandler.test.ts b/lib/notifications/__tests__/createNotificationHandler.test.ts index ca7fb677..60b6e5ba 100644 --- a/lib/notifications/__tests__/createNotificationHandler.test.ts +++ b/lib/notifications/__tests__/createNotificationHandler.test.ts @@ -26,6 +26,10 @@ vi.mock("@/lib/networking/safeParseJson", () => ({ safeParseJson: vi.fn(async (req: Request) => req.json()), })); +/** + * + * @param body + */ function createRequest(body: unknown): NextRequest { return new NextRequest("https://recoup-api.vercel.app/api/notifications", { method: "POST", diff --git a/lib/notifications/__tests__/validateCreateNotificationBody.test.ts b/lib/notifications/__tests__/validateCreateNotificationBody.test.ts index 10390b15..645ccedc 100644 --- a/lib/notifications/__tests__/validateCreateNotificationBody.test.ts +++ b/lib/notifications/__tests__/validateCreateNotificationBody.test.ts @@ -16,6 +16,11 @@ vi.mock("@/lib/networking/safeParseJson", () => ({ safeParseJson: vi.fn(async (req: Request) => req.json()), })); +/** + * + * @param body + * @param headers + */ function createRequest(body: unknown, headers: Record = {}): NextRequest { const defaultHeaders: Record = { "Content-Type": "application/json" }; return new NextRequest("http://localhost/api/notifications", { diff --git a/lib/prompts/getSystemPrompt.ts b/lib/prompts/getSystemPrompt.ts index 54964670..5077609a 100644 --- a/lib/prompts/getSystemPrompt.ts +++ b/lib/prompts/getSystemPrompt.ts @@ -13,6 +13,7 @@ import { AccountWithDetails } from "@/lib/supabase/accounts/getAccountWithDetail * @param params.artistInstruction - The artist instruction * @param params.conversationName - The name of the conversation * @param params.accountWithDetails - The account with details + * @param params.orgId * @returns The system prompt */ export function getSystemPrompt({ diff --git 
a/lib/slack/getBotChannels.ts b/lib/slack/getBotChannels.ts index 01fb47ff..6c2f905a 100644 --- a/lib/slack/getBotChannels.ts +++ b/lib/slack/getBotChannels.ts @@ -9,6 +9,8 @@ interface ConversationsListResponse { /** * Returns all channels the bot is a member of, paginating through all results. + * + * @param token */ export async function getBotChannels(token: string): Promise> { const channels: Array<{ id: string; name: string }> = []; diff --git a/lib/slack/getBotUserId.ts b/lib/slack/getBotUserId.ts index 1c3e0924..673ec465 100644 --- a/lib/slack/getBotUserId.ts +++ b/lib/slack/getBotUserId.ts @@ -8,6 +8,8 @@ interface AuthTestResponse { /** * Returns the authenticated bot's Slack user ID via auth.test. + * + * @param token */ export async function getBotUserId(token: string): Promise { const authTest = await slackGet("auth.test", token); diff --git a/lib/slack/getSlackUserInfo.ts b/lib/slack/getSlackUserInfo.ts index eb144e45..91873ddf 100644 --- a/lib/slack/getSlackUserInfo.ts +++ b/lib/slack/getSlackUserInfo.ts @@ -16,6 +16,9 @@ interface UsersInfoResponse { /** * Fetches a Slack account's display name and avatar by their Slack ID. 
+ * + * @param token + * @param userId */ export async function getSlackUserInfo( token: string, diff --git a/lib/spotify/getSpotifyFollowers.ts b/lib/spotify/getSpotifyFollowers.ts index 235de41e..acd1c3be 100644 --- a/lib/spotify/getSpotifyFollowers.ts +++ b/lib/spotify/getSpotifyFollowers.ts @@ -37,6 +37,7 @@ interface SpotifySearchResponse { /** * Get Spotify follower count for an artist + * * @param artistName - The name of the artist to search for * @returns Promise - The follower count of the first matching artist */ diff --git a/lib/supabase/account_artist_ids/getAccountArtistIds.ts b/lib/supabase/account_artist_ids/getAccountArtistIds.ts index e4e6b809..42b550d0 100644 --- a/lib/supabase/account_artist_ids/getAccountArtistIds.ts +++ b/lib/supabase/account_artist_ids/getAccountArtistIds.ts @@ -8,7 +8,9 @@ export type AccountArtistRow = ArtistQueryRow & { artist_id: string; pinned: boo * Get all artists for an array of artist IDs or account IDs, with full info. * Returns raw data - formatting should be done by caller. * - * @param params Object with artistIds or accountIds array + * @param params - Object with artistIds or accountIds array + * @param params.artistIds + * @param params.accountIds * @returns Array of raw artist rows from database */ export async function getAccountArtistIds(params: { diff --git a/lib/supabase/account_workspace_ids/getAccountWorkspaceIds.ts b/lib/supabase/account_workspace_ids/getAccountWorkspaceIds.ts index ae121fdd..4ca7ad8e 100644 --- a/lib/supabase/account_workspace_ids/getAccountWorkspaceIds.ts +++ b/lib/supabase/account_workspace_ids/getAccountWorkspaceIds.ts @@ -10,7 +10,7 @@ export type AccountWorkspaceRow = Omit & { * Get all workspaces for an account, with full info. * Returns raw data - formatting should be done by caller. 
* - * @param accountId The owner's account ID + * @param accountId - The owner's account ID * @returns Array of raw workspace rows from database */ export async function getAccountWorkspaceIds(accountId: string): Promise<AccountWorkspaceRow[]> { diff --git a/lib/supabase/files/createFileRecord.ts b/lib/supabase/files/createFileRecord.ts index 6f836f3c..3182de11 100644 --- a/lib/supabase/files/createFileRecord.ts +++ b/lib/supabase/files/createFileRecord.ts @@ -25,6 +25,8 @@ export interface CreateFileRecordParams { /** * Create a file record in the database + * + * @param params */ export async function createFileRecord(params: CreateFileRecordParams): Promise { const { diff --git a/lib/supabase/song_artists/insertSongArtists.ts b/lib/supabase/song_artists/insertSongArtists.ts index b81879e3..69878d6d 100644 --- a/lib/supabase/song_artists/insertSongArtists.ts +++ b/lib/supabase/song_artists/insertSongArtists.ts @@ -5,6 +5,8 @@ export type SongArtistInsert = TablesInsert<"song_artists">; /** * Inserts song-artist relationships, skipping duplicates. 
+ * + * @param songArtists */ export async function insertSongArtists(songArtists: SongArtistInsert[]): Promise { const records = songArtists.filter( diff --git a/lib/supabase/storage/uploadFileByKey.ts b/lib/supabase/storage/uploadFileByKey.ts index ba146fa3..ae149173 100644 --- a/lib/supabase/storage/uploadFileByKey.ts +++ b/lib/supabase/storage/uploadFileByKey.ts @@ -3,6 +3,12 @@ import { SUPABASE_STORAGE_BUCKET } from "@/lib/const"; /** * Upload file to Supabase storage by key + * + * @param key + * @param file + * @param options + * @param options.contentType + * @param options.upsert */ export async function uploadFileByKey( key: string, diff --git a/lib/tasks/__tests__/enrichTaskWithTriggerInfo.test.ts b/lib/tasks/__tests__/enrichTaskWithTriggerInfo.test.ts index 60d38a96..c6082f98 100644 --- a/lib/tasks/__tests__/enrichTaskWithTriggerInfo.test.ts +++ b/lib/tasks/__tests__/enrichTaskWithTriggerInfo.test.ts @@ -1,6 +1,9 @@ import { describe, it, expect, vi, beforeEach } from "vitest"; import { enrichTaskWithTriggerInfo } from "../enrichTaskWithTriggerInfo"; +import { fetchTriggerRuns } from "@/lib/trigger/fetchTriggerRuns"; +import { retrieveTaskRun } from "@/lib/trigger/retrieveTaskRun"; + vi.mock("@/lib/trigger/fetchTriggerRuns", () => ({ fetchTriggerRuns: vi.fn(), })); @@ -9,9 +12,6 @@ vi.mock("@/lib/trigger/retrieveTaskRun", () => ({ retrieveTaskRun: vi.fn(), })); -import { fetchTriggerRuns } from "@/lib/trigger/fetchTriggerRuns"; -import { retrieveTaskRun } from "@/lib/trigger/retrieveTaskRun"; - const mockTask = { id: "task-123", title: "Test Task", diff --git a/lib/tasks/__tests__/getTaskRunHandler.test.ts b/lib/tasks/__tests__/getTaskRunHandler.test.ts index 9f17fffc..3ab107f8 100644 --- a/lib/tasks/__tests__/getTaskRunHandler.test.ts +++ b/lib/tasks/__tests__/getTaskRunHandler.test.ts @@ -23,6 +23,9 @@ vi.mock("@/lib/networking/getCorsHeaders", () => ({ getCorsHeaders: vi.fn(() => ({ "Access-Control-Allow-Origin": "*" })), })); +/** + * + */ function 
createMockRequest(): NextRequest { return { url: "http://localhost:3000/api/tasks/runs", diff --git a/lib/tasks/__tests__/validateGetTaskRunQuery.test.ts b/lib/tasks/__tests__/validateGetTaskRunQuery.test.ts index f7126175..77d410da 100644 --- a/lib/tasks/__tests__/validateGetTaskRunQuery.test.ts +++ b/lib/tasks/__tests__/validateGetTaskRunQuery.test.ts @@ -24,6 +24,8 @@ vi.mock("@/lib/admins/checkIsAdmin", () => ({ /** * Creates a mock NextRequest with the given URL. + * + * @param url */ function createMockRequest(url: string): NextRequest { return { diff --git a/lib/tasks/__tests__/validateGetTasksQuery.test.ts b/lib/tasks/__tests__/validateGetTasksQuery.test.ts index b9d0dda7..11226d30 100644 --- a/lib/tasks/__tests__/validateGetTasksQuery.test.ts +++ b/lib/tasks/__tests__/validateGetTasksQuery.test.ts @@ -22,6 +22,10 @@ vi.mock("@/lib/admins/checkIsAdmin", () => ({ checkIsAdmin: vi.fn(), })); +/** + * + * @param url + */ function createMockRequest(url: string): NextRequest { return { url, diff --git a/lib/transcribe/processAudioTranscription.ts b/lib/transcribe/processAudioTranscription.ts index 351eee34..0e05905a 100644 --- a/lib/transcribe/processAudioTranscription.ts +++ b/lib/transcribe/processAudioTranscription.ts @@ -7,6 +7,8 @@ import { ProcessTranscriptionParams, ProcessTranscriptionResult } from "./types" /** * Fetches audio from URL, transcribes it with OpenAI Whisper, and saves both * the original audio and transcript markdown to the customer's files. 
+ * + * @param params */ export async function processAudioTranscription( params: ProcessTranscriptionParams, @@ -64,6 +66,10 @@ export async function processAudioTranscription( }; } +/** + * + * @param contentType + */ function getExtensionFromContentType(contentType: string): string { if (contentType.includes("wav")) return "wav"; if (contentType.includes("m4a") || contentType.includes("mp4")) return "m4a"; diff --git a/lib/transcribe/saveAudioToFiles.ts b/lib/transcribe/saveAudioToFiles.ts index 12bda1ef..2124e512 100644 --- a/lib/transcribe/saveAudioToFiles.ts +++ b/lib/transcribe/saveAudioToFiles.ts @@ -2,6 +2,10 @@ import { uploadFileByKey } from "@/lib/supabase/storage/uploadFileByKey"; import { createFileRecord } from "@/lib/supabase/files/createFileRecord"; import { SaveAudioParams, FileRecord } from "./types"; +/** + * + * @param params + */ export async function saveAudioToFiles(params: SaveAudioParams): Promise<FileRecord> { const { audioBlob, diff --git a/lib/transcribe/saveTranscriptToFiles.ts b/lib/transcribe/saveTranscriptToFiles.ts index 627feb6d..fa7518c5 100644 --- a/lib/transcribe/saveTranscriptToFiles.ts +++ b/lib/transcribe/saveTranscriptToFiles.ts @@ -2,6 +2,10 @@ import { uploadFileByKey } from "@/lib/supabase/storage/uploadFileByKey"; import { createFileRecord } from "@/lib/supabase/files/createFileRecord"; import { SaveTranscriptParams, FileRecord } from "./types"; +/** + * + * @param params + */ export async function saveTranscriptToFiles(params: SaveTranscriptParams): Promise<FileRecord> { const { markdown, ownerAccountId, artistAccountId, title = "Transcription" } = params; diff --git a/lib/transcribe/types.ts b/lib/transcribe/types.ts index 91c0ac10..916e699c 100644 --- a/lib/transcribe/types.ts +++ b/lib/transcribe/types.ts @@ -56,6 +56,8 @@ export interface ProcessTranscriptionResult { /** * Formats transcription errors into user-friendly messages. * Centralizes error message logic to avoid duplication. 
+ * + * @param error */ export function formatTranscriptionError(error: unknown): { message: string; status: number } { const rawMessage = error instanceof Error ? error.message : "Transcription failed"; diff --git a/lib/trigger/triggerCreateContent.ts b/lib/trigger/triggerCreateContent.ts index 9d6e5bd8..eb41fb3c 100644 --- a/lib/trigger/triggerCreateContent.ts +++ b/lib/trigger/triggerCreateContent.ts @@ -4,7 +4,7 @@ import { CREATE_CONTENT_TASK_ID } from "@/lib/const"; export interface TriggerCreateContentPayload { accountId: string; artistSlug: string; - template: string; + template?: string; lipsync: boolean; /** Controls caption length: "short", "medium", or "long". */ captionLength: "short" | "medium" | "long"; diff --git a/lib/trigger/triggerPrimitive.ts b/lib/trigger/triggerPrimitive.ts new file mode 100644 index 00000000..ad0e2c61 --- /dev/null +++ b/lib/trigger/triggerPrimitive.ts @@ -0,0 +1,12 @@ +import { tasks } from "@trigger.dev/sdk"; + +/** + * Triggers a Trigger.dev primitive task by ID. + * + * @param taskId - The Trigger.dev task identifier. + * @param payload - The task payload. + * @returns The task handle with run ID. 
+ */ +export async function triggerPrimitive(taskId: string, payload: Record<string, unknown>) { + return tasks.trigger(taskId, payload); +} diff --git a/package.json b/package.json index 7a9df5fb..5d12b8b0 100644 --- a/package.json +++ b/package.json @@ -29,6 +29,7 @@ "@coinbase/x402": "^0.7.3", "@composio/core": "^0.3.4", "@composio/vercel": "^0.3.4", + "@fal-ai/client": "^1.9.5", "@modelcontextprotocol/sdk": "^1.24.3", "@privy-io/node": "^0.6.2", "@supabase/supabase-js": "^2.86.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 6d4e05a0..72b683bc 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -47,6 +47,9 @@ importers: '@composio/vercel': specifier: ^0.3.4 version: 0.3.4(@composio/core@0.3.4(ws@8.18.3(bufferutil@4.0.9)(utf-8-validate@5.0.10))(zod@4.1.13))(ai@6.0.0-beta.122(zod@4.1.13)) + '@fal-ai/client': + specifier: ^1.9.5 + version: 1.9.5 '@modelcontextprotocol/sdk': specifier: ^1.24.3 version: 1.24.3(zod@4.1.13) @@ -754,6 +757,10 @@ packages: resolution: {integrity: sha512-zQ0IqbdX8FZ9aw11vP+dZkKDkS+kgIvQPHnSAXzP9pLu+Rfu3D3XEeLbicvoXJTYnhZiPmsZUxgdzXwNKxRPbA==} engines: {node: '>=14'} + '@fal-ai/client@1.9.5': + resolution: {integrity: sha512-knCMOqXapzL5Lsp4Xh/B/VfvbseKgHg2Kt//MjcxN5weF59/26En3zXTPd8pljl4QAr7b62X5EuNCT69MpyjSA==} + engines: {node: '>=18.0.0'} + '@gemini-wallet/core@0.3.2': resolution: {integrity: sha512-Z4aHi3ECFf5oWYWM3F1rW83GJfB9OvhBYPTmb5q+VyK3uvzvS48lwo+jwh2eOoCRWEuT/crpb9Vwp2QaS5JqgQ==} peerDependencies: @@ -1056,6 +1063,10 @@ packages: '@cfworker/json-schema': optional: true + '@msgpack/msgpack@3.1.3': + resolution: {integrity: sha512-47XIizs9XZXvuJgoaJUIE2lFoID8ugvc0jzSHP+Ptfk8nTbnR8g788wv48N03Kx0UkAv559HWRQ3yzOgzlRNUA==} + engines: {node: '>= 18'} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': resolution: {integrity: sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw==} cpu: [arm64] @@ -5515,6 +5526,9 @@ packages: resolution: {integrity: 
sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==} hasBin: true + robot3@0.4.1: + resolution: {integrity: sha512-hzjy826lrxzx8eRgv80idkf8ua1JAepRc9Efdtj03N3KNJuznQCPlyCJ7gnUmDFwZCLQjxy567mQVKmdv2BsXQ==} + rollup@4.55.1: resolution: {integrity: sha512-wDv/Ht1BNHB4upNbK74s9usvl7hObDnvVzknxqY/E/O3X6rW1U1rV1aENEfJ54eFZDTNo7zv1f5N4edCluH7+A==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} @@ -7190,6 +7204,12 @@ snapshots: ethereum-cryptography: 2.2.1 micro-ftch: 0.3.1 + '@fal-ai/client@1.9.5': + dependencies: + '@msgpack/msgpack': 3.1.3 + eventsource-parser: 1.1.2 + robot3: 0.4.1 + '@gemini-wallet/core@0.3.2(viem@2.40.3(bufferutil@4.0.9)(typescript@5.9.3)(utf-8-validate@5.0.10)(zod@4.1.13))': dependencies: '@metamask/rpc-errors': 7.0.2 @@ -7564,6 +7584,8 @@ snapshots: transitivePeerDependencies: - supports-color + '@msgpack/msgpack@3.1.3': {} + '@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3': optional: true @@ -13693,6 +13715,8 @@ snapshots: dependencies: glob: 10.5.0 + robot3@0.4.1: {} + rollup@4.55.1: dependencies: '@types/estree': 1.0.8 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml new file mode 100644 index 00000000..112faff5 --- /dev/null +++ b/pnpm-workspace.yaml @@ -0,0 +1 @@ +onlyBuiltDependencies: '@coinbase/x402'