diff --git a/packages/cli/src/utils/publishProject.test.ts b/packages/cli/src/utils/publishProject.test.ts index 957be1a24..b5a3b8117 100644 --- a/packages/cli/src/utils/publishProject.test.ts +++ b/packages/cli/src/utils/publishProject.test.ts @@ -37,9 +37,38 @@ describe("createPublishArchive", () => { describe("publishProjectArchive", () => { beforeEach(() => { - vi.stubGlobal( - "fetch", - vi.fn().mockResolvedValue( + vi.stubEnv("HYPERFRAMES_PUBLISHED_PROJECTS_API_URL", ""); + vi.stubEnv("HEYGEN_API_URL", ""); + }); + + afterEach(() => { + vi.unstubAllGlobals(); + vi.unstubAllEnvs(); + }); + + it("uploads through the staged publish flow and returns the stable project URL", async () => { + const dir = makeProjectDir(); + const fetchMock = vi + .fn() + .mockResolvedValueOnce( + new Response( + JSON.stringify({ + data: { + upload_url: + "https://s3.example.com/upload?X-Amz-SignedHeaders=content-length;content-type;host;x-amz-server-side-encryption", + upload_key: "ephemeral_store/hyperframes/project_uploads/upload-1/demo.zip", + upload_headers: { + "content-type": "application/zip", + "x-amz-server-side-encryption": "AES256", + }, + content_type: "application/zip", + }, + }), + { status: 200 }, + ), + ) + .mockResolvedValueOnce(new Response(null, { status: 200 })) + .mockResolvedValueOnce( new Response( JSON.stringify({ data: { @@ -52,16 +81,9 @@ describe("publishProjectArchive", () => { }), { status: 200 }, ), - ), - ); - }); - - afterEach(() => { - vi.unstubAllGlobals(); - }); + ); + vi.stubGlobal("fetch", fetchMock); - it("uploads the archive and returns the stable project URL", async () => { - const dir = makeProjectDir(); try { writeFileSync(join(dir, "index.html"), "", "utf-8"); writeFileSync(join(dir, "styles.css"), "body {}", "utf-8"); @@ -73,8 +95,73 @@ describe("publishProjectArchive", () => { projectId: "hfp_123", url: "https://hyperframes.dev/p/hfp_123", }); - expect(fetch).toHaveBeenCalledTimes(1); - expect(fetch).toHaveBeenCalledWith( + 
expect(fetchMock).toHaveBeenCalledTimes(3); + expect(fetchMock).toHaveBeenNthCalledWith( + 1, + "https://api2.heygen.com/v1/hyperframes/projects/publish/upload", + expect.objectContaining({ + method: "POST", + headers: { "content-type": "application/json", heygen_route: "canary" }, + signal: expect.any(AbortSignal), + }), + ); + expect(fetchMock).toHaveBeenNthCalledWith( + 2, + "https://s3.example.com/upload?X-Amz-SignedHeaders=content-length;content-type;host;x-amz-server-side-encryption", + expect.objectContaining({ + method: "PUT", + headers: { + "content-length": expect.any(String), + "content-type": "application/zip", + "x-amz-server-side-encryption": "AES256", + }, + signal: expect.any(AbortSignal), + }), + ); + expect(fetchMock).toHaveBeenNthCalledWith( + 3, + "https://api2.heygen.com/v1/hyperframes/projects/publish/complete", + expect.objectContaining({ + method: "POST", + headers: { "content-type": "application/json", heygen_route: "canary" }, + signal: expect.any(AbortSignal), + }), + ); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); + + it("falls back to the legacy multipart endpoint when staged publish is not deployed", async () => { + const dir = makeProjectDir(); + const fetchMock = vi + .fn() + .mockResolvedValueOnce(new Response("not found", { status: 404 })) + .mockResolvedValueOnce( + new Response( + JSON.stringify({ + data: { + project_id: "hfp_123", + title: "demo", + file_count: 2, + url: "https://hyperframes.dev/p/hfp_123", + claim_token: "claim-token", + }, + }), + { status: 200 }, + ), + ); + vi.stubGlobal("fetch", fetchMock); + + try { + writeFileSync(join(dir, "index.html"), "", "utf-8"); + + const result = await publishProjectArchive(dir); + + expect(result.projectId).toBe("hfp_123"); + expect(fetchMock).toHaveBeenCalledTimes(2); + expect(fetchMock).toHaveBeenNthCalledWith( + 2, "https://api2.heygen.com/v1/hyperframes/projects/publish", expect.objectContaining({ method: "POST", @@ -86,4 +173,37 @@ 
describe("publishProjectArchive", () => { rmSync(dir, { recursive: true, force: true }); } }); + + it("does not fall back to multipart when a staged S3 upload fails", async () => { + const dir = makeProjectDir(); + const fetchMock = vi + .fn() + .mockResolvedValueOnce( + new Response( + JSON.stringify({ + data: { + upload_url: "https://s3.example.com/upload", + upload_key: "ephemeral_store/hyperframes/project_uploads/upload-1/demo.zip", + upload_headers: { + "content-type": "application/zip", + "x-amz-server-side-encryption": "AES256", + }, + content_type: "application/zip", + }, + }), + { status: 200 }, + ), + ) + .mockResolvedValueOnce(new Response("denied", { status: 403 })); + vi.stubGlobal("fetch", fetchMock); + + try { + writeFileSync(join(dir, "index.html"), "", "utf-8"); + + await expect(publishProjectArchive(dir)).rejects.toThrow("Failed to upload project archive"); + expect(fetchMock).toHaveBeenCalledTimes(2); + } finally { + rmSync(dir, { recursive: true, force: true }); + } + }); }); diff --git a/packages/cli/src/utils/publishProject.ts b/packages/cli/src/utils/publishProject.ts index 536b19d47..3b4d62855 100644 --- a/packages/cli/src/utils/publishProject.ts +++ b/packages/cli/src/utils/publishProject.ts @@ -4,6 +4,8 @@ import AdmZip from "adm-zip"; const IGNORED_DIRS = new Set([".git", "node_modules", "dist", ".next", "coverage"]); const IGNORED_FILES = new Set([".DS_Store", "Thumbs.db"]); +const PUBLISH_CONTENT_TYPE = "application/zip"; +const PUBLISH_REQUEST_TIMEOUT_MS = 30_000; export interface PublishArchiveResult { buffer: Buffer; @@ -18,6 +20,128 @@ export interface PublishedProjectResponse { claimToken: string; } +interface StagedUploadResponse { + uploadUrl: string; + uploadKey: string; + contentType: string; + uploadHeaders: Record<string, string>; +} + +type JsonRecord = Record<string, unknown>; + +function isRecord(value: unknown): value is JsonRecord { + return typeof value === "object" && value !== null && !Array.isArray(value); +} + +function dataRecord(payload: 
unknown): JsonRecord | null { + if (!isRecord(payload) || !isRecord(payload["data"])) return null; + return payload["data"]; +} + +function stringField(record: JsonRecord, key: string): string | null { + const value = record[key]; + return typeof value === "string" ? value : null; +} + +function parsePublishedProjectResponse(payload: unknown): PublishedProjectResponse | null { + const data = dataRecord(payload); + if (!data) return null; + const projectId = stringField(data, "project_id"); + const title = stringField(data, "title"); + const url = stringField(data, "url"); + const claimToken = stringField(data, "claim_token"); + const fileCount = data["file_count"]; + if (!projectId || !title || !url || !claimToken || typeof fileCount !== "number") { + return null; + } + return { + projectId, + title, + fileCount, + url, + claimToken, + }; +} + +function parseStagedUploadResponse( + payload: unknown, + archiveByteLength: number, +): StagedUploadResponse | null { + const data = dataRecord(payload); + if (!data) return null; + const uploadUrl = stringField(data, "upload_url"); + const uploadKey = stringField(data, "upload_key"); + const contentType = stringField(data, "content_type") || PUBLISH_CONTENT_TYPE; + if (!uploadUrl || !uploadKey) return null; + return { + uploadUrl, + uploadKey, + contentType, + uploadHeaders: getUploadHeaders(data, uploadUrl, contentType, archiveByteLength), + }; +} + +function getUploadHeaders( + data: JsonRecord, + uploadUrl: string, + contentType: string, + archiveByteLength: number, +): Record<string, string> { + const headers: Record<string, string> = {}; + const uploadHeaders = data["upload_headers"]; + if (isRecord(uploadHeaders)) { + for (const [key, value] of Object.entries(uploadHeaders)) { + if (typeof value === "string" && key.trim()) { + headers[key] = value; + } + } + } + + if (!Object.keys(headers).some((key) => key.toLowerCase() === "content-type")) { + headers["content-type"] = contentType; + } + + const signedHeaders = new 
URL(uploadUrl).searchParams.get("X-Amz-SignedHeaders"); + if ( + signedHeaders?.split(";").includes("x-amz-server-side-encryption") && + !Object.keys(headers).some((key) => key.toLowerCase() === "x-amz-server-side-encryption") + ) { + headers["x-amz-server-side-encryption"] = "AES256"; + } + if ( + signedHeaders?.split(";").includes("content-length") && + !Object.keys(headers).some((key) => key.toLowerCase() === "content-length") + ) { + headers["content-length"] = String(archiveByteLength); + } + + return headers; +} + +async function readJson(response: Response): Promise<unknown> { + return response + .clone() + .json() + .catch(() => null); +} + +async function readErrorMessage(response: Response, fallback: string): Promise<string> { + const contentType = response.headers.get("content-type") || ""; + if (contentType.includes("application/json")) { + const payload = await readJson(response); + if (isRecord(payload) && typeof payload["message"] === "string") { + return payload["message"]; + } + } + + if (response.status === 403 && response.headers.get("cf-mitigated") === "challenge") { + return "Publish upload was blocked before reaching HyperFrames. Please retry after staged uploads are available."; + } + + const text = await response.text().catch(() => ""); + return text.trim() ? 
`${fallback}: ${text.trim().slice(0, 180)}` : fallback; +} + function shouldIgnoreSegment(segment: string): boolean { return segment.startsWith(".") || IGNORED_DIRS.has(segment) || IGNORED_FILES.has(segment); } @@ -65,37 +189,111 @@ export function getPublishApiBaseUrl(): string { ).replace(/\/$/, ""); } -export async function publishProjectArchive(projectDir: string): Promise<PublishedProjectResponse> { - const title = basename(projectDir); - const archive = createPublishArchive(projectDir); - const archiveBytes = new Uint8Array(archive.buffer.byteLength); - archiveBytes.set(archive.buffer); +function archiveArrayBuffer(archive: PublishArchiveResult): ArrayBuffer { + const arrayBuffer = new ArrayBuffer(archive.buffer.byteLength); + new Uint8Array(arrayBuffer).set(archive.buffer); + return arrayBuffer; +} + +async function publishProjectArchiveDirect( + apiBaseUrl: string, + title: string, + archive: PublishArchiveResult, +): Promise<PublishedProjectResponse> { const body = new FormData(); body.set("title", title); - body.set("file", new File([archiveBytes], `${title}.zip`, { type: "application/zip" })); + body.set( + "file", + new File([archiveArrayBuffer(archive)], `${title}.zip`, { type: PUBLISH_CONTENT_TYPE }), + ); const headers: Record<string, string> = { heygen_route: "canary", }; - const response = await fetch(`${getPublishApiBaseUrl()}/v1/hyperframes/projects/publish`, { + const response = await fetch(`${apiBaseUrl}/v1/hyperframes/projects/publish`, { method: "POST", body, headers, - signal: AbortSignal.timeout(30_000), + signal: AbortSignal.timeout(PUBLISH_REQUEST_TIMEOUT_MS), }); - const payload = await response.json().catch(() => null); - const message = - typeof payload?.message === "string" ? 
payload.message : "Failed to publish project"; - if (!response.ok || !payload?.data) { - throw new Error(message); + const payload = await readJson(response); + const publishedProject = parsePublishedProjectResponse(payload); + if (!response.ok || !publishedProject) { + throw new Error(await readErrorMessage(response, "Failed to publish project")); } - return { - projectId: String(payload.data.project_id), - title: String(payload.data.title), - fileCount: Number(payload.data.file_count), - url: String(payload.data.url), - claimToken: String(payload.data.claim_token), - }; + return publishedProject; +} + +async function publishProjectArchiveStaged( + apiBaseUrl: string, + title: string, + archive: PublishArchiveResult, +): Promise<PublishedProjectResponse | null> { + const fileName = `${title}.zip`; + const uploadResponse = await fetch(`${apiBaseUrl}/v1/hyperframes/projects/publish/upload`, { + method: "POST", + body: JSON.stringify({ + file_name: fileName, + content_type: PUBLISH_CONTENT_TYPE, + content_length: archive.buffer.byteLength, + }), + headers: { + "content-type": "application/json", + heygen_route: "canary", + }, + signal: AbortSignal.timeout(PUBLISH_REQUEST_TIMEOUT_MS), + }); + + if (uploadResponse.status === 404 || uploadResponse.status === 405) { + return null; + } + + const uploadPayload = await readJson(uploadResponse); + const stagedUpload = parseStagedUploadResponse(uploadPayload, archive.buffer.byteLength); + if (!uploadResponse.ok || !stagedUpload) { + throw new Error(await readErrorMessage(uploadResponse, "Failed to prepare project upload")); + } + + const s3Response = await fetch(stagedUpload.uploadUrl, { + method: "PUT", + body: new Blob([archiveArrayBuffer(archive)], { type: stagedUpload.contentType }), + headers: stagedUpload.uploadHeaders, + signal: AbortSignal.timeout(PUBLISH_REQUEST_TIMEOUT_MS), + }); + if (!s3Response.ok) { + throw new Error(await readErrorMessage(s3Response, "Failed to upload project archive")); + } + + const completeResponse = await 
fetch(`${apiBaseUrl}/v1/hyperframes/projects/publish/complete`, { + method: "POST", + body: JSON.stringify({ + upload_key: stagedUpload.uploadKey, + file_name: fileName, + title, + }), + headers: { + "content-type": "application/json", + heygen_route: "canary", + }, + signal: AbortSignal.timeout(PUBLISH_REQUEST_TIMEOUT_MS), + }); + + const completePayload = await readJson(completeResponse); + const publishedProject = parsePublishedProjectResponse(completePayload); + if (!completeResponse.ok || !publishedProject) { + throw new Error(await readErrorMessage(completeResponse, "Failed to publish project")); + } + + return publishedProject; +} + +export async function publishProjectArchive(projectDir: string): Promise<PublishedProjectResponse> { + const title = basename(projectDir); + const archive = createPublishArchive(projectDir); + const apiBaseUrl = getPublishApiBaseUrl(); + const stagedResult = await publishProjectArchiveStaged(apiBaseUrl, title, archive); + if (stagedResult) return stagedResult; + return publishProjectArchiveDirect(apiBaseUrl, title, archive); }