|
import { Tasks, env } from "@playfulprogramming/common";
import { db, profiles } from "@playfulprogramming/db";
import * as github from "@playfulprogramming/github-api";
import { s3 } from "@playfulprogramming/s3";
import { Value } from "@sinclair/typebox/value";
import { eq } from "drizzle-orm";
import matter from "gray-matter";
import sharp from "sharp";
import { Readable } from "node:stream";
import type { ReadableStream as WebReadableStream } from "node:stream/web";
import { createProcessor } from "../../createProcessor.ts";
import { AuthorMetaSchema } from "./types.ts";
| 12 | + |
| 13 | +const PROFILE_IMAGE_SIZE_MAX = 2048; |
| 14 | + |
| 15 | +async function processProfileImg( |
| 16 | + stream: ReadableStream<Uint8Array>, |
| 17 | + uploadKey: string, |
| 18 | +) { |
| 19 | + const pipeline = sharp() |
| 20 | + .resize({ |
| 21 | + width: PROFILE_IMAGE_SIZE_MAX, |
| 22 | + height: PROFILE_IMAGE_SIZE_MAX, |
| 23 | + fit: "inside", |
| 24 | + }) |
| 25 | + .jpeg({ mozjpeg: true }); |
| 26 | + |
| 27 | + Readable.fromWeb(stream as never).pipe(pipeline); |
| 28 | + |
| 29 | + const bucket = await s3.createBucket(env.S3_BUCKET); |
| 30 | + await s3.upload(bucket, uploadKey, undefined, pipeline, "image/jpeg"); |
| 31 | +} |
| 32 | + |
| 33 | +export default createProcessor(Tasks.SYNC_AUTHOR, async (job, { signal }) => { |
| 34 | + const authorId = job.data.author; |
| 35 | + const authorMetaUrl = new URL( |
| 36 | + `content/${encodeURIComponent(authorId)}/index.md`, |
| 37 | + "http://localhost", |
| 38 | + ); |
| 39 | + |
| 40 | + const authorMetaResponse = await github.getContentsRaw({ |
| 41 | + ref: job.data.ref, |
| 42 | + path: authorMetaUrl.pathname, |
| 43 | + repoOwner: env.GITHUB_REPO_OWNER, |
| 44 | + repoName: env.GITHUB_REPO_NAME, |
| 45 | + signal, |
| 46 | + }); |
| 47 | + |
| 48 | + if (authorMetaResponse.data === undefined) { |
| 49 | + if (authorMetaResponse.response.status == 404) { |
| 50 | + console.log( |
| 51 | + `Metadata for ${authorId} (${authorMetaUrl.pathname}) returned 404 - removing profile entry.`, |
| 52 | + ); |
| 53 | + await db.delete(profiles).where(eq(profiles.slug, authorId)); |
| 54 | + return; |
| 55 | + } |
| 56 | + |
| 57 | + throw new Error(`Unable to fetch author data for ${authorId}`); |
| 58 | + } |
| 59 | + |
| 60 | + const { data } = matter(authorMetaResponse.data); |
| 61 | + const authorData = Value.Parse(AuthorMetaSchema, data); |
| 62 | + |
| 63 | + let profileImgKey: string | null = null; |
| 64 | + if (authorData.profileImg) { |
| 65 | + const profileImgUrl = new URL(authorData.profileImg, authorMetaUrl); |
| 66 | + const { data: profileImgStream } = await github.getContentsRawStream({ |
| 67 | + ref: job.data.ref, |
| 68 | + path: profileImgUrl.pathname, |
| 69 | + repoOwner: env.GITHUB_REPO_OWNER, |
| 70 | + repoName: env.GITHUB_REPO_NAME, |
| 71 | + signal, |
| 72 | + }); |
| 73 | + |
| 74 | + if (profileImgStream === null || typeof profileImgStream === "undefined") { |
| 75 | + throw new Error( |
| 76 | + `Unable to fetch profile image for ${authorId} (${profileImgUrl.pathname})`, |
| 77 | + ); |
| 78 | + } |
| 79 | + |
| 80 | + profileImgKey = `profiles/${authorId}.jpeg`; |
| 81 | + await processProfileImg(profileImgStream, profileImgKey); |
| 82 | + } |
| 83 | + |
| 84 | + const result = { |
| 85 | + slug: authorId, |
| 86 | + name: authorData.name, |
| 87 | + description: authorData.description, |
| 88 | + profileImage: profileImgKey, |
| 89 | + meta: { |
| 90 | + socials: authorData.socials, |
| 91 | + roles: authorData.roles, |
| 92 | + }, |
| 93 | + }; |
| 94 | + |
| 95 | + await db |
| 96 | + .insert(profiles) |
| 97 | + .values(result) |
| 98 | + .onConflictDoUpdate({ target: profiles.slug, set: result }); |
| 99 | +}); |
0 commit comments