diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 334eee85..502e366c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,23 +48,40 @@ jobs: run: yarn lint - if: needs.changes.outputs.code == 'true' run: yarn build + + # Upload build output once — test jobs download it instead of rebuilding. + # Build output is platform-independent (compiled TS + bundled JS). + - if: needs.changes.outputs.code == 'true' && matrix['node-version'] == '22.x' && matrix['os'] == 'ubuntu-latest' + uses: actions/upload-artifact@v4 + with: + name: build-output + path: | + lib-es5/ + prelude/sea-bootstrap.bundle.js + retention-days: 1 + test_host: - needs: changes + needs: [changes, build] uses: ./.github/workflows/test.yml with: npm_command: test:host should_run: ${{ needs.changes.outputs.code }} + # Keep sequential — pnpm tests install global packages and SEA tests + # download binaries to a shared cache, both of which race under concurrency. + test_concurrency: '1' test_22: - needs: changes + needs: [changes, build] uses: ./.github/workflows/test.yml with: npm_command: test:22 should_run: ${{ needs.changes.outputs.code }} + test_concurrency: '4' test_24: - needs: changes + needs: [changes, build] uses: ./.github/workflows/test.yml with: npm_command: test:24 should_run: ${{ needs.changes.outputs.code }} + test_concurrency: '4' diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 87f6b843..ef517837 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,6 +8,11 @@ on: type: string required: true default: 'install' + test_concurrency: + description: 'Number of tests to run in parallel (1 = sequential)' + type: string + required: false + default: '1' should_run: description: "Whether to actually execute the test. When 'false' the job runs but every step is skipped — this preserves the required-status-check contexts on docs-only PRs without burning CI minutes." 
type: string @@ -33,15 +38,23 @@ jobs: uses: actions/cache@v4 with: path: ~/.pkg-cache/ - key: ${{ matrix.os }}-${{ matrix.node-version }} + key: pkg-cache-${{ matrix.os }}-${{ matrix.node-version }} + restore-keys: | + pkg-cache-${{ matrix.os }}- + ${{ matrix.os }}-${{ matrix.node-version }} - if: inputs.should_run == 'true' run: yarn install + # Download pre-built lib-es5/ from the build job instead of rebuilding. - if: inputs.should_run == 'true' - run: yarn build + uses: actions/download-artifact@v4 + with: + name: build-output + - if: inputs.should_run == 'true' run: yarn ${{ inputs.npm_command }} env: CI: true + TEST_CONCURRENCY: ${{ inputs.test_concurrency }} timeout-minutes: 30 diff --git a/lib/index.ts b/lib/index.ts index bac3dd34..7ed66229 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -1,6 +1,7 @@ import assert from 'assert'; -import { existsSync, readFileSync, copyFileSync } from 'fs'; +import { existsSync, readFileSync, copyFileSync, renameSync, rmSync } from 'fs'; import { mkdir, readFile, rm, stat } from 'fs/promises'; +import { randomBytes } from 'crypto'; import minimist from 'minimist'; import { need, system } from '@yao-pkg/pkg-fetch'; import path from 'path'; @@ -627,18 +628,36 @@ export async function exec(argv2: string[]) { if (f.platform === 'macos') { // ad-hoc sign the base binary temporarily to generate bytecode - // due to the new mandatory signing requirement + // due to the new mandatory signing requirement. + // + // The signed binary is cached alongside the fetched binary. Concurrent + // pkg processes (e.g., parallel tests) used to race here — all of them + // would rm + copy + sign the same path, truncating each other's writes. + // Now: if the signed binary already exists, reuse it; otherwise write + // to a unique temp path and atomically rename. rename() replaces any + // existing target on POSIX, so racing writers don't corrupt readers + // (their open file handles remain valid). 
const signedBinaryPath = `${f.binaryPath}-signed`; - await rm(signedBinaryPath, { recursive: true, force: true }); - copyFileSync(f.binaryPath, signedBinaryPath); - try { - signMachOExecutable(signedBinaryPath); - } catch { - throw wasReported('Cannot generate bytecode', [ - 'pkg fails to run "codesign" utility. Due to the mandatory signing', - 'requirement of macOS, executables must be signed. Please ensure the', - 'utility is installed and properly configured.', - ]); + if (!existsSync(signedBinaryPath)) { + const tmpPath = `${signedBinaryPath}.tmp.${process.pid}.${randomBytes( + 4, + ).toString('hex')}`; + copyFileSync(f.binaryPath, tmpPath); + try { + signMachOExecutable(tmpPath); + renameSync(tmpPath, signedBinaryPath); + } catch { + try { + rmSync(tmpPath, { force: true }); + } catch { + /* ignore */ + } + throw wasReported('Cannot generate bytecode', [ + 'pkg fails to run "codesign" utility. Due to the mandatory signing', + 'requirement of macOS, executables must be signed. Please ensure the', + 'utility is installed and properly configured.', + ]); + } } f.binaryPath = signedBinaryPath; } diff --git a/lib/sea.ts b/lib/sea.ts index fab44034..3fe2f3d0 100644 --- a/lib/sea.ts +++ b/lib/sea.ts @@ -96,6 +96,14 @@ async function extract(os: string, archivePath: string): Promise { let nodePath = ''; if (os === 'win') { + nodePath = join(archiveDir, `${nodeDir}.exe`); + + // Skip extraction if already extracted — the node binary is immutable + // per archive, so re-extracting on every pkg invocation just wastes time. 
+ if (await exists(nodePath)) { + return nodePath; + } + // use unzipper to extract the archive const { files } = await unzipper.Open.file(archivePath); const nodeBinPath = `${nodeDir}/node.exe`; @@ -106,12 +114,16 @@ async function extract(os: string, archivePath: string): Promise { throw new Error('Node executable not found in the archive'); } - nodePath = join(archiveDir, `${nodeDir}.exe`); - // extract the node executable await pipeline(nodeBin.stream(), createWriteStream(nodePath)); } else { const nodeBinPath = `${nodeDir}/bin/node`; + nodePath = join(archiveDir, nodeBinPath); + + // Skip extraction if already extracted (see above) + if (await exists(nodePath)) { + return nodePath; + } // use tar to extract the archive await tarExtract({ @@ -119,9 +131,6 @@ async function extract(os: string, archivePath: string): Promise { cwd: archiveDir, filter: (path) => path === nodeBinPath, }); - - // check if the node executable exists - nodePath = join(archiveDir, nodeBinPath); } // check if the node executable exists @@ -305,15 +314,17 @@ async function getNodejsExecutable( const filePath = join(downloadDir, fileName); - // skip download if file exists + // Skip download + checksum if the archive is already cached. Archives + // from nodejs.org are immutable, so re-verifying on every pkg invocation + // just re-hashes 100 MB for no benefit (and re-fetches SHASUMS256.txt). + // NOTE(review): if verifyChecksum throws, the freshly downloaded archive stays at filePath, so every later run will see it "cached" and skip verification of a possibly corrupt file — remove filePath on checksum failure (TODO confirm). 
if (!(await exists(filePath))) { log.info(`Downloading nodejs executable from ${url}...`); await downloadFile(url, filePath); + log.info(`Verifying checksum of ${fileName}`); + await verifyChecksum(filePath, checksumUrl, fileName); } - log.info(`Verifying checksum of ${fileName}`); - await verifyChecksum(filePath, checksumUrl, fileName); - log.info(`Extracting node binary from ${fileName}`); const nodePath = await extract(os, filePath); diff --git a/test/test.js b/test/test.js index ca7b732e..7e562d36 100644 --- a/test/test.js +++ b/test/test.js @@ -2,11 +2,13 @@ 'use strict'; +const os = require('os'); const path = require('path'); const pc = require('picocolors'); const { globSync } = require('tinyglobby'); const utils = require('./utils.js'); const { spawn } = require('child_process'); +const { need } = require('@yao-pkg/pkg-fetch'); const host = 'node' + utils.getNodeMajorVersion(); let target = process.argv[2] || 'host'; if (target === 'host') target = host; @@ -22,12 +24,19 @@ const testFilter = process.argv[4] || (flavor.match(/^test/) ? flavor : null); const isCI = process.env.CI === 'true'; +// Concurrency for parallel test execution. Defaults to CPU count (capped at 4). +// Set TEST_CONCURRENCY=1 to run tests sequentially. +const concurrency = + parseInt(process.env.TEST_CONCURRENCY, 10) || + Math.min(os.availableParallelism?.() ?? os.cpus().length, 4); + console.log(''); console.log('*************************************'); console.log(target + ' ' + flavor); console.log( `Host Info: ${process.version} ${process.platform} ${process.arch}`, ); +console.log(`Concurrency: ${concurrency}`); console.log('*************************************'); console.log(''); @@ -73,7 +82,16 @@ const npmTests = [ 'test-99-#1135', 'test-99-#1191', 'test-99-#1192', + // SEA tests — they ignore the target argument (always build for the host + // Node version), so running them in both test:22 and test:24 is redundant. 
'test-00-sea', + 'test-85-sea-enhanced', + 'test-86-sea-assets', + 'test-87-sea-esm', + 'test-89-sea-fs-ops', + 'test-90-sea-worker-threads', + 'test-91-sea-esm-entry', + 'test-92-sea-tla', ]; if (testFilter) { @@ -175,8 +193,12 @@ async function run() { let failed = []; const start = Date.now(); + const isParallel = concurrency > 1; + function addLog(log, isError = false) { - clearLastLine(); + // Only use TTY line-clearing in sequential mode — parallel output + // interleaves, so clearing lines would eat other tests' results. + if (!isParallel) clearLastLine(); if (isError) { console.error(log); } else { @@ -188,7 +210,7 @@ async function run() { file = path.resolve(file); const startTest = Date.now(); try { - if (!isCI && process.stdout.isTTY) { + if (!isParallel && !isCI && process.stdout.isTTY) { console.log(pc.gray(`⏳ ${file} - ${done}/${files.length}`)); } await runTest(file); @@ -212,9 +234,43 @@ async function run() { done++; }); - for (let i = 0; i < promises.length; i++) { - await promises[i](); + if (isParallel) { + // Pre-download pkg-fetch binaries for every platform/arch the tests + // could target. pkg-fetch uses a deterministic `*.downloading` temp + // filename that collides when multiple processes download the same + // binary concurrently, so we serialize fetches here via the pkg-fetch + // API (cleaner than spawning pkg, and skips pkg's codesign step). + const platforms = ['linux', 'macos', 'win']; + const arches = ['x64', 'arm64']; + console.log( + `Warming binary cache for ${target} (${platforms.length * arches.length} targets)...`, + ); + for (const platform of platforms) { + for (const arch of arches) { + try { + await need({ nodeRange: target, platform, arch }); + } catch (err) { + // Best-effort — if a particular combination isn't available, + // tests that need it will surface the error themselves. 
+ console.log( + pc.gray(` skip ${target}-${platform}-${arch}: ${err.message}`), + ); + } + } + } + console.log('Binary cache ready.'); + } + + // Run tests with bounded concurrency + const executing = new Set(); + for (const task of promises) { + const p = task().finally(() => executing.delete(p)); + executing.add(p); + if (executing.size >= concurrency) { + await Promise.race(executing); + } } + await Promise.all(executing); const end = Date.now();