diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 19c987905e..4eaab310d2 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -117761,12 +117761,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -118135,7 +118164,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -118170,6 +118199,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/analyze-action.js b/lib/analyze-action.js index af3fb58319..06b857bbdf 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -90112,29 +90112,6 @@ var persistInputs = function() { ); core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; -function getPullRequestBranches() { - const pullRequest = github.context.payload.pull_request; - if (pullRequest) { - return { - base: pullRequest.base.ref, - // We use the head label instead of the head ref here, because the head - // ref lacks owner information and by itself does not uniquely identify - // the head branch (which may be in a forked repository). 
- head: pullRequest.head.label - }; - } - const codeScanningRef = process.env.CODE_SCANNING_REF; - const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH; - if (codeScanningRef && codeScanningBaseBranch) { - return { - base: codeScanningBaseBranch, - // PR analysis under Default Setup analyzes the PR head commit instead of - // the merge commit, so we can use the provided ref directly. - head: codeScanningRef - }; - } - return void 0; -} var qualityCategoryMapping = { "c#": "csharp", cpp: "c-cpp", @@ -90193,6 +90170,9 @@ var path16 = __toESM(require("path")); var import_perf_hooks2 = require("perf_hooks"); var io5 = __toESM(require_io()); +// src/autobuild.ts +var core11 = __toESM(require_core()); + // src/api-client.ts var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils4()); @@ -90362,9 +90342,6 @@ function wrapApiConfigurationError(e) { return e; } -// src/autobuild.ts -var core11 = __toESM(require_core()); - // src/codeql.ts var fs14 = __toESM(require("fs")); var path14 = __toESM(require("path")); @@ -90941,12 +90918,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + 
extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -91534,34 +91540,9 @@ var GitHubFeatureFlags = class { }; // src/diff-informed-analysis-utils.ts -async function getDiffInformedAnalysisBranches(codeql, features, logger) { - if (!await features.getValue("diff_informed_queries" /* DiffInformedQueries */, codeql)) { - return void 0; - } - const gitHubVersion = await getGitHubVersion(); - if (gitHubVersion.type === 1 /* GHES */ && satisfiesGHESVersion(gitHubVersion.version, "<3.19", true)) { - return void 0; - } - const branches = getPullRequestBranches(); - if (!branches) { - logger.info( - "Not performing diff-informed analysis because we are not analyzing a pull request." 
- ); - } - return branches; -} function getDiffRangesJsonFilePath() { return path9.join(getTemporaryDirectory(), "pr-diff-range.json"); } -function writeDiffRangesJsonFile(logger, ranges) { - const jsonContents = JSON.stringify(ranges, null, 2); - const jsonFilePath = getDiffRangesJsonFilePath(); - fs8.writeFileSync(jsonFilePath, jsonContents); - logger.debug( - `Wrote pr-diff-range JSON file to ${jsonFilePath}: -${jsonContents}` - ); -} function readDiffRangesJsonFile(logger) { const jsonFilePath = getDiffRangesJsonFilePath(); if (!fs8.existsSync(jsonFilePath)) { @@ -92832,7 +92813,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -92867,6 +92848,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -93636,14 +93618,31 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag, trap_import_duration_ms: Math.round(trapImportTime) }; } -async function setupDiffInformedQueryRun(branches, logger) { +async function setupDiffInformedQueryRun(logger) { return await withGroupAsync( "Generating diff range extension pack", async () => { + let diffRanges; + try { + diffRanges = readDiffRangesJsonFile(logger); + } catch (e) { + logger.debug( + `Failed to read precomputed diff ranges: ${getErrorMessage(e)}` + ); + diffRanges = void 0; + } + if (diffRanges === void 0) { + logger.info( + "No precomputed diff ranges found; skipping diff-informed analysis stage." 
+ ); + return void 0; + } + const fileCount = new Set( + diffRanges.filter((r) => r.path).map((r) => r.path) + ).size; logger.info( - `Calculating diff ranges for ${branches.base}...${branches.head}` + `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).` ); - const diffRanges = await getPullRequestEditedDiffRanges(branches, logger); const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); if (packDir === void 0) { logger.warning( @@ -93658,117 +93657,6 @@ async function setupDiffInformedQueryRun(branches, logger) { } ); } -async function getPullRequestEditedDiffRanges(branches, logger) { - const fileDiffs = await getFileDiffsWithBasehead(branches, logger); - if (fileDiffs === void 0) { - return void 0; - } - if (fileDiffs.length >= 300) { - logger.warning( - `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` - ); - return void 0; - } - const results = []; - for (const filediff of fileDiffs) { - const diffRanges = getDiffRanges(filediff, logger); - if (diffRanges === void 0) { - return void 0; - } - results.push(...diffRanges); - } - return results; -} -async function getFileDiffsWithBasehead(branches, logger) { - const repositoryNwo = getRepositoryNwoFromEnv( - "CODE_SCANNING_REPOSITORY", - "GITHUB_REPOSITORY" - ); - const basehead = `${branches.base}...${branches.head}`; - try { - const response = await getApiClient().rest.repos.compareCommitsWithBasehead( - { - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - basehead, - per_page: 1 - } - ); - logger.debug( - `Response from compareCommitsWithBasehead(${basehead}): -${JSON.stringify(response, null, 2)}` - ); - return response.data.files; - } catch (error2) { - if (error2.status) { - logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); - logger.debug( - `Error running compareCommitsWithBasehead(${basehead}): -Request: ${JSON.stringify(error2.request, null, 2)} -Error Response: 
${JSON.stringify(error2.response, null, 2)}` - ); - return void 0; - } else { - throw error2; - } - } -} -function getDiffRanges(fileDiff, logger) { - const filename = path16.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path16.sep, "/"); - if (fileDiff.patch === void 0) { - if (fileDiff.changes === 0) { - return []; - } - return [ - { - path: filename, - startLine: 0, - endLine: 0 - } - ]; - } - let currentLine = 0; - let additionRangeStartLine = void 0; - const diffRanges = []; - const diffLines = fileDiff.patch.split("\n"); - diffLines.push(" "); - for (const diffLine of diffLines) { - if (diffLine.startsWith("-")) { - continue; - } - if (diffLine.startsWith("+")) { - if (additionRangeStartLine === void 0) { - additionRangeStartLine = currentLine; - } - currentLine++; - continue; - } - if (additionRangeStartLine !== void 0) { - diffRanges.push({ - path: filename, - startLine: additionRangeStartLine, - endLine: currentLine - 1 - }); - additionRangeStartLine = void 0; - } - if (diffLine.startsWith("@@ ")) { - const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); - if (match === null) { - logger.warning( - `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` - ); - return void 0; - } - currentLine = parseInt(match[1], 10); - continue; - } - if (diffLine.startsWith(" ")) { - currentLine++; - continue; - } - } - return diffRanges; -} function writeDiffRangeDataExtensionPack(logger, ranges) { if (ranges === void 0) { return void 0; @@ -93817,7 +93705,6 @@ extensions: `Wrote pr-diff-range extension pack to ${extensionFilePath}: ${extensionContents}` ); - writeDiffRangesJsonFile(logger, ranges); return diffRangeDir; } var defaultSuites = /* @__PURE__ */ new Set([ @@ -96206,12 +96093,7 @@ async function run() { getOptionalInput("ram") || process.env["CODEQL_RAM"], logger ); - const branches = await getDiffInformedAnalysisBranches( - codeql, - features, - logger - ); - const diffRangePackDir = branches ? 
await setupDiffInformedQueryRun(branches, logger) : void 0; + const diffRangePackDir = await setupDiffInformedQueryRun(logger); await warnIfGoInstalledAfterInit(config, logger); await runAutobuildIfLegacyGoWorkflow(config, logger); dbCreationTimings = await runFinalize( diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index adf440738a..4e82146071 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -78501,12 +78501,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -79170,7 +79199,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -79205,6 +79234,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 08c8449012..fd8f7d18ab 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -129185,12 +129185,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && 
prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -130781,7 +130810,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -130816,6 +130845,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/init-action.js b/lib/init-action.js index 5481ab1e8f..056562fbb0 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -185,7 +185,7 @@ var require_file_command = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); 
exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var os5 = __importStar4(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -193,10 +193,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs18.existsSync(filePath)) { + if (!fs19.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { + fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { encoding: "utf8" }); } @@ -999,14 +999,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}`; - let path20 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path21 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path20 && !path20.startsWith("/")) { - path20 = `/${path20}`; + if (path21 && !path21.startsWith("/")) { + path21 = `/${path21}`; } - url = new URL(origin + path20); + url = new URL(origin + path21); } return url; } @@ -2620,20 +2620,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename(path20) { - if (typeof path20 !== "string") { + module2.exports = function basename(path21) { + if (typeof path21 !== "string") { return ""; } - for (var i = path20.length - 1; i >= 0; --i) { - switch (path20.charCodeAt(i)) { + for (var i = path21.length - 1; i >= 0; --i) { + switch (path21.charCodeAt(i)) { case 47: // '/' case 92: - path20 = path20.slice(i + 1); - return path20 === ".." || path20 === "." ? "" : path20; + path21 = path21.slice(i + 1); + return path21 === ".." || path21 === "." ? "" : path21; } } - return path20 === ".." || path20 === "." ? "" : path20; + return path21 === ".." || path21 === "." ? 
"" : path21; }; } }); @@ -5663,7 +5663,7 @@ var require_request = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path20, + path: path21, method, body, headers, @@ -5677,11 +5677,11 @@ var require_request = __commonJS({ throwOnError, expectContinue }, handler) { - if (typeof path20 !== "string") { + if (typeof path21 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path20[0] !== "/" && !(path20.startsWith("http://") || path20.startsWith("https://")) && method !== "CONNECT") { + } else if (path21[0] !== "/" && !(path21.startsWith("http://") || path21.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path20) !== null) { + } else if (invalidPathRegex.exec(path21) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -5744,7 +5744,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path20, query) : path20; + this.path = query ? util.buildURL(path21, query) : path21; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6752,9 +6752,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path20 = search ? `${pathname}${search}` : pathname; + const path21 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path20; + this.opts.path = path21; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -7994,7 +7994,7 @@ var require_client = __commonJS({ writeH2(client, client[kHTTP2Session], request); return; } - const { body, method, path: path20, host, upgrade, headers, blocking, reset } = request; + const { body, method, path: path21, host, upgrade, headers, blocking, reset } = request; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -8044,7 +8044,7 @@ var require_client = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path20} HTTP/1.1\r + let header = `${method} ${path21} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -8107,7 +8107,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request) { - const { body, method, path: path20, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; + const { body, method, path: path21, host, upgrade, expectContinue, signal, headers: reqHeaders } = request; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -8150,7 +8150,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path20; + headers[HTTP2_HEADER_PATH] = path21; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -10390,20 +10390,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path20) { - if (typeof path20 !== "string") { - return path20; + function safeUrl(path21) { + if (typeof path21 !== "string") { + return path21; } - 
const pathSegments = path20.split("?"); + const pathSegments = path21.split("?"); if (pathSegments.length !== 2) { - return path20; + return path21; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path20, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path20); + function matchKey(mockDispatch2, { path: path21, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path21); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10421,7 +10421,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path20 }) => matchValue(safeUrl(path20), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path21 }) => matchValue(safeUrl(path21), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10458,9 +10458,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path20, method, body, headers, query } = opts; + const { path: path21, method, body, headers, query } = opts; return { - path: path20, + path: path21, method, body, headers, @@ -10909,10 +10909,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path20, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path21, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path20, + Path: path21, "Status code": statusCode, Persistent: persist ? 
"\u2705" : "\u274C", Invocations: timesInvoked, @@ -15532,8 +15532,8 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path20) { - for (const char of path20) { + function validateCookiePath(path21) { + for (const char of path21) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -17213,11 +17213,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path20 = opts.path; + let path21 = opts.path; if (!opts.path.startsWith("/")) { - path20 = `/${path20}`; + path21 = `/${path21}`; } - url = new URL(util.parseOrigin(url).origin + path20); + url = new URL(util.parseOrigin(url).origin + path21); } else { if (!opts) { opts = typeof url === "object" ? url : {}; @@ -18440,7 +18440,7 @@ var require_path_utils = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -18450,7 +18450,7 @@ var require_path_utils = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path20.sep); + return pth.replace(/[/\\]/g, path21.sep); } exports2.toPlatformPath = toPlatformPath; } @@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({ var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod 
= void 0; - var fs18 = __importStar4(require("fs")); - var path20 = __importStar4(require("path")); - _a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs19 = __importStar4(require("fs")); + var path21 = __importStar4(require("path")); + _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs18.constants.O_RDONLY; + exports2.READONLY = fs19.constants.O_RDONLY; function exists(fsPath) { return __awaiter4(this, void 0, void 0, function* () { try { @@ -18563,7 +18563,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path20.extname(filePath).toUpperCase(); + const upperExt = path21.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -18587,11 +18587,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path20.dirname(filePath); - const upperName = path20.basename(filePath).toUpperCase(); + const directory = path21.dirname(filePath); + const upperName = path21.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === 
actualName.toUpperCase()) { - filePath = path20.join(directory, actualName); + filePath = path21.join(directory, actualName); break; } } @@ -18686,7 +18686,7 @@ var require_io = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var ioUtil = __importStar4(require_io_util()); function cp(source, dest, options = {}) { return __awaiter4(this, void 0, void 0, function* () { @@ -18695,7 +18695,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path20.join(dest, path20.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path21.join(dest, path21.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -18707,7 +18707,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path20.relative(source, newDest) === "") { + if (path21.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); @@ -18720,7 +18720,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path20.join(dest, path20.basename(source)); + dest = path21.join(dest, path21.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -18731,7 +18731,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path20.dirname(dest)); + yield mkdirP(path21.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -18794,7 +18794,7 @@ var require_io = __commonJS({ } const extensions = []; if 
(ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path20.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path21.delimiter)) { if (extension) { extensions.push(extension); } @@ -18807,12 +18807,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path20.sep)) { + if (tool.includes(path21.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path20.delimiter)) { + for (const p of process.env.PATH.split(path21.delimiter)) { if (p) { directories.push(p); } @@ -18820,7 +18820,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path20.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path21.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -18936,7 +18936,7 @@ var require_toolrunner = __commonJS({ var os5 = __importStar4(require("os")); var events = __importStar4(require("events")); var child = __importStar4(require("child_process")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var io7 = __importStar4(require_io()); var ioUtil = __importStar4(require_io_util()); var timers_1 = require("timers"); @@ -19151,7 +19151,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter4(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path20.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path21.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve9, reject) => __awaiter4(this, void 0, void 0, function* () { @@ -19651,7 +19651,7 @@ var require_core = 
__commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -19679,7 +19679,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path20.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path21.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath2; function getInput2(name, options) { @@ -21743,8 +21743,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path20 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path20} does not exist${os_1.EOL}`); + const path21 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path21} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -26505,7 +26505,7 @@ var require_path = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.convertPosixPathToPattern = exports2.convertWindowsPathToPattern = exports2.convertPathToPattern = exports2.escapePosixPath = exports2.escapeWindowsPath = exports2.escape = exports2.removeLeadingDotSegment = exports2.makeAbsolute = exports2.unixify = void 0; var os5 = require("os"); - var path20 = require("path"); + var path21 = require("path"); var IS_WINDOWS_PLATFORM = os5.platform() === "win32"; var LEADING_DOT_SEGMENT_CHARACTERS_COUNT = 2; var POSIX_UNESCAPED_GLOB_SYMBOLS_RE = /(\\?)([()*?[\]{|}]|^!|[!+@](?=\()|\\(?![!()*+?@[\]{|}]))/g; @@ -26517,7 +26517,7 @@ var require_path = __commonJS({ } exports2.unixify = unixify; function makeAbsolute(cwd, filepath) { - 
return path20.resolve(cwd, filepath); + return path21.resolve(cwd, filepath); } exports2.makeAbsolute = makeAbsolute; function removeLeadingDotSegment(entry) { @@ -27814,7 +27814,7 @@ var require_braces = __commonJS({ var require_constants8 = __commonJS({ "node_modules/picomatch/lib/constants.js"(exports2, module2) { "use strict"; - var path20 = require("path"); + var path21 = require("path"); var WIN_SLASH = "\\\\/"; var WIN_NO_SLASH = `[^${WIN_SLASH}]`; var DOT_LITERAL = "\\."; @@ -27984,7 +27984,7 @@ var require_constants8 = __commonJS({ /* | */ CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */ - SEP: path20.sep, + SEP: path21.sep, /** * Create EXTGLOB_CHARS */ @@ -28011,7 +28011,7 @@ var require_constants8 = __commonJS({ var require_utils6 = __commonJS({ "node_modules/picomatch/lib/utils.js"(exports2) { "use strict"; - var path20 = require("path"); + var path21 = require("path"); var win32 = process.platform === "win32"; var { REGEX_BACKSLASH, @@ -28040,7 +28040,7 @@ var require_utils6 = __commonJS({ if (options && typeof options.windows === "boolean") { return options.windows; } - return win32 === true || path20.sep === "\\"; + return win32 === true || path21.sep === "\\"; }; exports2.escapeLast = (input, char, lastIdx) => { const idx = input.lastIndexOf(char, lastIdx); @@ -29175,7 +29175,7 @@ var require_parse4 = __commonJS({ var require_picomatch = __commonJS({ "node_modules/picomatch/lib/picomatch.js"(exports2, module2) { "use strict"; - var path20 = require("path"); + var path21 = require("path"); var scan = require_scan(); var parse = require_parse4(); var utils = require_utils6(); @@ -29260,7 +29260,7 @@ var require_picomatch = __commonJS({ }; picomatch.matchBase = (input, glob2, options, posix = utils.isWindows(options)) => { const regex = glob2 instanceof RegExp ? 
glob2 : picomatch.makeRe(glob2, options); - return regex.test(path20.basename(input)); + return regex.test(path21.basename(input)); }; picomatch.isMatch = (str2, patterns, options) => picomatch(patterns, options)(str2); picomatch.parse = (pattern, options) => { @@ -29487,7 +29487,7 @@ var require_pattern = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.isAbsolute = exports2.partitionAbsoluteAndRelative = exports2.removeDuplicateSlashes = exports2.matchAny = exports2.convertPatternsToRe = exports2.makeRe = exports2.getPatternParts = exports2.expandBraceExpansion = exports2.expandPatternsWithBraceExpansion = exports2.isAffectDepthOfReadingPattern = exports2.endsWithSlashGlobStar = exports2.hasGlobStar = exports2.getBaseDirectory = exports2.isPatternRelatedToParentDirectory = exports2.getPatternsOutsideCurrentDirectory = exports2.getPatternsInsideCurrentDirectory = exports2.getPositivePatterns = exports2.getNegativePatterns = exports2.isPositivePattern = exports2.isNegativePattern = exports2.convertToNegativePattern = exports2.convertToPositivePattern = exports2.isDynamicPattern = exports2.isStaticPattern = void 0; - var path20 = require("path"); + var path21 = require("path"); var globParent = require_glob_parent(); var micromatch = require_micromatch(); var GLOBSTAR = "**"; @@ -29582,7 +29582,7 @@ var require_pattern = __commonJS({ } exports2.endsWithSlashGlobStar = endsWithSlashGlobStar; function isAffectDepthOfReadingPattern(pattern) { - const basename = path20.basename(pattern); + const basename = path21.basename(pattern); return endsWithSlashGlobStar(pattern) || isStaticPattern(basename); } exports2.isAffectDepthOfReadingPattern = isAffectDepthOfReadingPattern; @@ -29640,7 +29640,7 @@ var require_pattern = __commonJS({ } exports2.partitionAbsoluteAndRelative = partitionAbsoluteAndRelative; function isAbsolute3(pattern) { - return path20.isAbsolute(pattern); + return path21.isAbsolute(pattern); } 
exports2.isAbsolute = isAbsolute3; } @@ -29815,10 +29815,10 @@ var require_utils7 = __commonJS({ exports2.array = array; var errno = require_errno(); exports2.errno = errno; - var fs18 = require_fs(); - exports2.fs = fs18; - var path20 = require_path(); - exports2.path = path20; + var fs19 = require_fs(); + exports2.fs = fs19; + var path21 = require_path(); + exports2.path = path21; var pattern = require_pattern(); exports2.pattern = pattern; var stream2 = require_stream(); @@ -29930,8 +29930,8 @@ var require_async = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.read = void 0; - function read(path20, settings, callback) { - settings.fs.lstat(path20, (lstatError, lstat) => { + function read(path21, settings, callback) { + settings.fs.lstat(path21, (lstatError, lstat) => { if (lstatError !== null) { callFailureCallback(callback, lstatError); return; @@ -29940,7 +29940,7 @@ var require_async = __commonJS({ callSuccessCallback(callback, lstat); return; } - settings.fs.stat(path20, (statError, stat) => { + settings.fs.stat(path21, (statError, stat) => { if (statError !== null) { if (settings.throwErrorOnBrokenSymbolicLink) { callFailureCallback(callback, statError); @@ -29972,13 +29972,13 @@ var require_sync = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.read = void 0; - function read(path20, settings) { - const lstat = settings.fs.lstatSync(path20); + function read(path21, settings) { + const lstat = settings.fs.lstatSync(path21); if (!lstat.isSymbolicLink() || !settings.followSymbolicLink) { return lstat; } try { - const stat = settings.fs.statSync(path20); + const stat = settings.fs.statSync(path21); if (settings.markSymbolicLink) { stat.isSymbolicLink = () => true; } @@ -30000,12 +30000,12 @@ var require_fs2 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createFileSystemAdapter = 
exports2.FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); exports2.FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - stat: fs18.stat, - lstatSync: fs18.lstatSync, - statSync: fs18.statSync + lstat: fs19.lstat, + stat: fs19.stat, + lstatSync: fs19.lstatSync, + statSync: fs19.statSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === void 0) { @@ -30022,12 +30022,12 @@ var require_settings = __commonJS({ "node_modules/@nodelib/fs.stat/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var fs18 = require_fs2(); + var fs19 = require_fs2(); var Settings = class { constructor(_options = {}) { this._options = _options; this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true); - this.fs = fs18.createFileSystemAdapter(this._options.fs); + this.fs = fs19.createFileSystemAdapter(this._options.fs); this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); } @@ -30049,17 +30049,17 @@ var require_out = __commonJS({ var sync = require_sync(); var settings_1 = require_settings(); exports2.Settings = settings_1.default; - function stat(path20, optionsOrSettingsOrCallback, callback) { + function stat(path21, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === "function") { - async.read(path20, getSettings(), optionsOrSettingsOrCallback); + async.read(path21, getSettings(), optionsOrSettingsOrCallback); return; } - async.read(path20, getSettings(optionsOrSettingsOrCallback), callback); + async.read(path21, getSettings(optionsOrSettingsOrCallback), callback); } exports2.stat = stat; - function statSync2(path20, optionsOrSettings) { + function statSync2(path21, optionsOrSettings) { const settings = getSettings(optionsOrSettings); - return sync.read(path20, settings); + return sync.read(path21, 
settings); } exports2.statSync = statSync2; function getSettings(settingsOrOptions = {}) { @@ -30182,8 +30182,8 @@ var require_utils8 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.fs = void 0; - var fs18 = require_fs3(); - exports2.fs = fs18; + var fs19 = require_fs3(); + exports2.fs = fs19; } }); @@ -30275,16 +30275,16 @@ var require_async2 = __commonJS({ return; } const tasks = names.map((name) => { - const path20 = common2.joinPathSegments(directory, name, settings.pathSegmentSeparator); + const path21 = common2.joinPathSegments(directory, name, settings.pathSegmentSeparator); return (done) => { - fsStat.stat(path20, settings.fsStatSettings, (error2, stats) => { + fsStat.stat(path21, settings.fsStatSettings, (error2, stats) => { if (error2 !== null) { done(error2); return; } const entry = { name, - path: path20, + path: path21, dirent: utils.fs.createDirentFromStats(name, stats) }; if (settings.stats) { @@ -30378,14 +30378,14 @@ var require_fs4 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); exports2.FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - stat: fs18.stat, - lstatSync: fs18.lstatSync, - statSync: fs18.statSync, - readdir: fs18.readdir, - readdirSync: fs18.readdirSync + lstat: fs19.lstat, + stat: fs19.stat, + lstatSync: fs19.lstatSync, + statSync: fs19.statSync, + readdir: fs19.readdir, + readdirSync: fs19.readdirSync }; function createFileSystemAdapter(fsMethods) { if (fsMethods === void 0) { @@ -30402,15 +30402,15 @@ var require_settings2 = __commonJS({ "node_modules/@nodelib/fs.scandir/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsStat = require_out(); - var fs18 = require_fs4(); + var fs19 = 
require_fs4(); var Settings = class { constructor(_options = {}) { this._options = _options; this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false); - this.fs = fs18.createFileSystemAdapter(this._options.fs); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path20.sep); + this.fs = fs19.createFileSystemAdapter(this._options.fs); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path21.sep); this.stats = this._getValue(this._options.stats, false); this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true); this.fsStatSettings = new fsStat.Settings({ @@ -30437,17 +30437,17 @@ var require_out2 = __commonJS({ var sync = require_sync2(); var settings_1 = require_settings2(); exports2.Settings = settings_1.default; - function scandir(path20, optionsOrSettingsOrCallback, callback) { + function scandir(path21, optionsOrSettingsOrCallback, callback) { if (typeof optionsOrSettingsOrCallback === "function") { - async.read(path20, getSettings(), optionsOrSettingsOrCallback); + async.read(path21, getSettings(), optionsOrSettingsOrCallback); return; } - async.read(path20, getSettings(optionsOrSettingsOrCallback), callback); + async.read(path21, getSettings(optionsOrSettingsOrCallback), callback); } exports2.scandir = scandir; - function scandirSync(path20, optionsOrSettings) { + function scandirSync(path21, optionsOrSettings) { const settings = getSettings(optionsOrSettings); - return sync.read(path20, settings); + return sync.read(path21, settings); } exports2.scandirSync = scandirSync; function getSettings(settingsOrOptions = {}) { @@ -30974,7 +30974,7 @@ var require_settings3 = __commonJS({ "node_modules/@nodelib/fs.walk/out/settings.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsScandir = require_out2(); var Settings = class 
{ constructor(_options = {}) { @@ -30984,7 +30984,7 @@ var require_settings3 = __commonJS({ this.deepFilter = this._getValue(this._options.deepFilter, null); this.entryFilter = this._getValue(this._options.entryFilter, null); this.errorFilter = this._getValue(this._options.errorFilter, null); - this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path20.sep); + this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path21.sep); this.fsScandirSettings = new fsScandir.Settings({ followSymbolicLinks: this._options.followSymbolicLinks, fs: this._options.fs, @@ -31046,7 +31046,7 @@ var require_reader2 = __commonJS({ "node_modules/fast-glob/out/readers/reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var fsStat = require_out(); var utils = require_utils7(); var Reader = class { @@ -31059,7 +31059,7 @@ var require_reader2 = __commonJS({ }); } _getFullEntryPath(filepath) { - return path20.resolve(this._settings.cwd, filepath); + return path21.resolve(this._settings.cwd, filepath); } _makeEntry(stats, pattern) { const entry = { @@ -31475,7 +31475,7 @@ var require_provider = __commonJS({ "node_modules/fast-glob/out/providers/provider.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var path20 = require("path"); + var path21 = require("path"); var deep_1 = require_deep(); var entry_1 = require_entry(); var error_1 = require_error(); @@ -31489,7 +31489,7 @@ var require_provider = __commonJS({ this.entryTransformer = new entry_2.default(this._settings); } _getRootDirectory(task) { - return path20.resolve(this._settings.cwd, task.base); + return path21.resolve(this._settings.cwd, task.base); } _getReaderOptions(task) { const basePath = task.base === "." ? 
"" : task.base; @@ -31670,16 +31670,16 @@ var require_settings4 = __commonJS({ "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DEFAULT_FILE_SYSTEM_ADAPTER = void 0; - var fs18 = require("fs"); + var fs19 = require("fs"); var os5 = require("os"); var CPU_COUNT = Math.max(os5.cpus().length, 1); exports2.DEFAULT_FILE_SYSTEM_ADAPTER = { - lstat: fs18.lstat, - lstatSync: fs18.lstatSync, - stat: fs18.stat, - statSync: fs18.statSync, - readdir: fs18.readdir, - readdirSync: fs18.readdirSync + lstat: fs19.lstat, + lstatSync: fs19.lstatSync, + stat: fs19.stat, + statSync: fs19.statSync, + readdir: fs19.readdir, + readdirSync: fs19.readdirSync }; var Settings = class { constructor(_options = {}) { @@ -32122,7 +32122,7 @@ var require_ignore = __commonJS({ // path matching. // - check `string` either `MODE_IGNORE` or `MODE_CHECK_IGNORE` // @returns {TestResult} true if a file is ignored - test(path20, checkUnignored, mode) { + test(path21, checkUnignored, mode) { let ignored = false; let unignored = false; let matchedRule; @@ -32131,7 +32131,7 @@ var require_ignore = __commonJS({ if (unignored === negative && ignored !== unignored || negative && !ignored && !unignored && !checkUnignored) { return; } - const matched = rule[mode].test(path20); + const matched = rule[mode].test(path21); if (!matched) { return; } @@ -32152,17 +32152,17 @@ var require_ignore = __commonJS({ var throwError2 = (message, Ctor) => { throw new Ctor(message); }; - var checkPath = (path20, originalPath, doThrow) => { - if (!isString(path20)) { + var checkPath = (path21, originalPath, doThrow) => { + if (!isString(path21)) { return doThrow( `path must be a string, but got \`${originalPath}\``, TypeError ); } - if (!path20) { + if (!path21) { return doThrow(`path must not be empty`, TypeError); } - if (checkPath.isNotRelative(path20)) { + if (checkPath.isNotRelative(path21)) { const r = "`path.relative()`d"; return doThrow( `path should be a ${r} string, but got 
"${originalPath}"`, @@ -32171,7 +32171,7 @@ var require_ignore = __commonJS({ } return true; }; - var isNotRelative = (path20) => REGEX_TEST_INVALID_PATH.test(path20); + var isNotRelative = (path21) => REGEX_TEST_INVALID_PATH.test(path21); checkPath.isNotRelative = isNotRelative; checkPath.convert = (p) => p; var Ignore = class { @@ -32201,19 +32201,19 @@ var require_ignore = __commonJS({ } // @returns {TestResult} _test(originalPath, cache, checkUnignored, slices) { - const path20 = originalPath && checkPath.convert(originalPath); + const path21 = originalPath && checkPath.convert(originalPath); checkPath( - path20, + path21, originalPath, this._strictPathCheck ? throwError2 : RETURN_FALSE ); - return this._t(path20, cache, checkUnignored, slices); + return this._t(path21, cache, checkUnignored, slices); } - checkIgnore(path20) { - if (!REGEX_TEST_TRAILING_SLASH.test(path20)) { - return this.test(path20); + checkIgnore(path21) { + if (!REGEX_TEST_TRAILING_SLASH.test(path21)) { + return this.test(path21); } - const slices = path20.split(SLASH).filter(Boolean); + const slices = path21.split(SLASH).filter(Boolean); slices.pop(); if (slices.length) { const parent = this._t( @@ -32226,18 +32226,18 @@ var require_ignore = __commonJS({ return parent; } } - return this._rules.test(path20, false, MODE_CHECK_IGNORE); + return this._rules.test(path21, false, MODE_CHECK_IGNORE); } - _t(path20, cache, checkUnignored, slices) { - if (path20 in cache) { - return cache[path20]; + _t(path21, cache, checkUnignored, slices) { + if (path21 in cache) { + return cache[path21]; } if (!slices) { - slices = path20.split(SLASH).filter(Boolean); + slices = path21.split(SLASH).filter(Boolean); } slices.pop(); if (!slices.length) { - return cache[path20] = this._rules.test(path20, checkUnignored, MODE_IGNORE); + return cache[path21] = this._rules.test(path21, checkUnignored, MODE_IGNORE); } const parent = this._t( slices.join(SLASH) + SLASH, @@ -32245,29 +32245,29 @@ var require_ignore = 
__commonJS({ checkUnignored, slices ); - return cache[path20] = parent.ignored ? parent : this._rules.test(path20, checkUnignored, MODE_IGNORE); + return cache[path21] = parent.ignored ? parent : this._rules.test(path21, checkUnignored, MODE_IGNORE); } - ignores(path20) { - return this._test(path20, this._ignoreCache, false).ignored; + ignores(path21) { + return this._test(path21, this._ignoreCache, false).ignored; } createFilter() { - return (path20) => !this.ignores(path20); + return (path21) => !this.ignores(path21); } filter(paths) { return makeArray(paths).filter(this.createFilter()); } // @returns {TestResult} - test(path20) { - return this._test(path20, this._testCache, true); + test(path21) { + return this._test(path21, this._testCache, true); } }; var factory = (options) => new Ignore(options); - var isPathValid = (path20) => checkPath(path20 && checkPath.convert(path20), path20, RETURN_FALSE); + var isPathValid = (path21) => checkPath(path21 && checkPath.convert(path21), path21, RETURN_FALSE); var setupWindows = () => { const makePosix = (str2) => /^\\\\\?\\/.test(str2) || /["<>|\u0000-\u001F]+/u.test(str2) ? str2 : str2.replace(/\\/g, "/"); checkPath.convert = makePosix; const REGEX_TEST_WINDOWS_PATH_ABSOLUTE = /^[a-z]:\//i; - checkPath.isNotRelative = (path20) => REGEX_TEST_WINDOWS_PATH_ABSOLUTE.test(path20) || isNotRelative(path20); + checkPath.isNotRelative = (path21) => REGEX_TEST_WINDOWS_PATH_ABSOLUTE.test(path21) || isNotRelative(path21); }; if ( // Detect `process` so that it can run in browsers. 
@@ -34052,7 +34052,7 @@ var require_internal_path_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname3(p) { @@ -34060,7 +34060,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path20.dirname(p); + let result = path21.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -34098,7 +34098,7 @@ var require_internal_path_helper = __commonJS({ assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path20.sep; + root += path21.sep; } return root + itemPath; } @@ -34136,10 +34136,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path20.sep)) { + if (!p.endsWith(path21.sep)) { return p; } - if (p === path20.sep) { + if (p === path21.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -34472,7 +34472,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path20 = (function() { + var path21 = (function() { try { return require("path"); } catch (e) { @@ -34480,7 +34480,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path20.sep; + minimatch.sep = path21.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var 
plTypes = { @@ -34569,8 +34569,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path20.sep !== "/") { - pattern = pattern.split(path20.sep).join("/"); + if (!options.allowWindowsEscape && path21.sep !== "/") { + pattern = pattern.split(path21.sep).join("/"); } this.options = options; this.set = []; @@ -34939,8 +34939,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path20.sep !== "/") { - f = f.split(path20.sep).join("/"); + if (path21.sep !== "/") { + f = f.split(path21.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -35072,7 +35072,7 @@ var require_internal_path = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -35087,12 +35087,12 @@ var require_internal_path = __commonJS({ assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path20.sep); + this.segments = itemPath.split(path21.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path20.basename(remaining); + const basename = path21.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -35110,7 +35110,7 @@ var require_internal_path = __commonJS({ assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); 
this.segments.push(segment); } else { - assert_1.default(!segment.includes(path20.sep), `Parameter 'itemPath' contains unexpected path separators`); + assert_1.default(!segment.includes(path21.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -35121,12 +35121,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path20.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path21.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path20.sep; + result += path21.sep; } result += this.segments[i]; } @@ -35170,7 +35170,7 @@ var require_internal_pattern = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -35199,7 +35199,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir2); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path20.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path21.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -35223,8 +35223,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if 
(!itemPath.endsWith(path20.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path20.sep}`; + if (!itemPath.endsWith(path21.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path21.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -35259,9 +35259,9 @@ var require_internal_pattern = __commonJS({ assert_1.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path20.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path21.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path20.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path21.sep}`)) { homedir2 = homedir2 || os5.homedir(); assert_1.default(homedir2, "Unable to determine HOME directory"); assert_1.default(pathHelper.hasAbsoluteRoot(homedir2), `Expected HOME directory to be a rooted path. 
Actual '${homedir2}'`); @@ -35345,8 +35345,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path20, level) { - this.path = path20; + constructor(path21, level) { + this.path = path21; this.level = level; } }; @@ -35466,9 +35466,9 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -35518,7 +35518,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs18.promises.lstat(searchPath)); + yield __await4(fs19.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -35549,7 +35549,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path20.join(item.path, x), childLevel)); + const childItems = (yield __await4(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path21.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -35584,7 +35584,7 @@ var require_internal_globber = __commonJS({ let stats; if 
(options.followSymbolicLinks) { try { - stats = yield fs18.promises.stat(item.path); + stats = yield fs19.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -35596,10 +35596,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs18.promises.lstat(item.path); + stats = yield fs19.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs18.promises.realpath(item.path); + const realPath = yield fs19.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -36933,8 +36933,8 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar4(require_glob()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); - var path20 = __importStar4(require("path")); + var fs19 = __importStar4(require("fs")); + var path21 = __importStar4(require("path")); var semver9 = __importStar4(require_semver3()); var util = __importStar4(require("util")); var constants_1 = require_constants10(); @@ -36954,16 +36954,16 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path20.join(baseLocation, "actions", "temp"); + tempDirectory = path21.join(baseLocation, "actions", "temp"); } - const dest = path20.join(tempDirectory, crypto2.randomUUID()); + const dest = path21.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); } exports2.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { - return fs18.statSync(filePath).size; + return fs19.statSync(filePath).size; } exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { @@ -36980,7 +36980,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path20.relative(workspace, 
file).replace(new RegExp(`\\${path20.sep}`, "g"), "/"); + const relativeFile = path21.relative(workspace, file).replace(new RegExp(`\\${path21.sep}`, "g"), "/"); core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -37003,7 +37003,7 @@ var require_cacheUtils = __commonJS({ exports2.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter4(this, void 0, void 0, function* () { - return util.promisify(fs18.unlink)(filePath); + return util.promisify(fs19.unlink)(filePath); }); } exports2.unlinkFile = unlinkFile; @@ -37048,7 +37048,7 @@ var require_cacheUtils = __commonJS({ exports2.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter4(this, void 0, void 0, function* () { - if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -44886,15 +44886,15 @@ var require_urlHelpers = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path20 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path20.startsWith("/")) { - path20 = path20.substring(1); + let path21 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path21.startsWith("/")) { + path21 = path21.substring(1); } - if (isAbsoluteUrl(path20)) { - requestUrl = path20; + if (isAbsoluteUrl(path21)) { + requestUrl = path21; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path20); + requestUrl = appendPath(requestUrl, path21); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -44942,9 +44942,9 @@ var require_urlHelpers = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path20 = 
pathToAppend.substring(0, searchStart); + const path21 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path20; + newPath = newPath + path21; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -48821,7 +48821,7 @@ var require_dist7 = __commonJS({ var stream2 = require("stream"); var coreLro = require_dist6(); var events = require("events"); - var fs18 = require("fs"); + var fs19 = require("fs"); var util = require("util"); var buffer = require("buffer"); function _interopNamespaceDefault(e) { @@ -48844,7 +48844,7 @@ var require_dist7 = __commonJS({ } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); - var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs18); + var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs19); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util); var logger = logger$1.createClientLogger("storage-blob"); var BaseRequestPolicy = class { @@ -49093,10 +49093,10 @@ var require_dist7 = __commonJS({ ]; function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path20 = urlParsed.pathname; - path20 = path20 || "/"; - path20 = escape(path20); - urlParsed.pathname = path20; + let path21 = urlParsed.pathname; + path21 = path21 || "/"; + path21 = escape(path21); + urlParsed.pathname = path21; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -49181,9 +49181,9 @@ var require_dist7 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path20 = urlParsed.pathname; - path20 = path20 ? path20.endsWith("/") ? `${path20}${name}` : `${path20}/${name}` : name; - urlParsed.pathname = path20; + let path21 = urlParsed.pathname; + path21 = path21 ? path21.endsWith("/") ? 
`${path21}${name}` : `${path21}/${name}` : name; + urlParsed.pathname = path21; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -50264,9 +50264,9 @@ var require_dist7 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request) { - const path20 = getURLPath(request.url) || "/"; + const path21 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path20}`; + canonicalizedResourceString += `/${this.factory.accountName}${path21}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -50559,9 +50559,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { - const path20 = getURLPath(request.url) || "/"; + const path21 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path20}`; + canonicalizedResourceString += `/${options.accountName}${path21}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { @@ -69863,8 +69863,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path20 = getURLPath(subRequest.url); - if (!path20 || path20 === "") { + const path21 = getURLPath(subRequest.url); + if (!path21 || path21 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -69924,8 +69924,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url2, getCoreClientOptions(pipeline)); - const path20 = getURLPath(url2); - if (path20 && path20 !== "/") { + const path21 = getURLPath(url2); + if (path21 && path21 !== 
"/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -72692,7 +72692,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_dist7(); var buffer = __importStar4(require("buffer")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); var utils = __importStar4(require_cacheUtils()); @@ -72803,7 +72803,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter4(this, void 0, void 0, function* () { - const writeStream = fs18.createWriteStream(archivePath); + const writeStream = fs19.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -72829,7 +72829,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter4(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs18.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs19.promises.open(archivePath, "w"); const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -72946,7 +72946,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs18.openSync(archivePath, "w"); + const fd = fs19.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const 
controller = new abort_controller_1.AbortController(); @@ -72964,12 +72964,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs18.writeFileSync(fd, result); + fs19.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs18.closeSync(fd); + fs19.closeSync(fd); } } }); @@ -73268,7 +73268,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar4(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var url_1 = require("url"); var utils = __importStar4(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -73406,7 +73406,7 @@ Other caches with similar key:`); return __awaiter4(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs18.openSync(archivePath, "r"); + const fd = fs19.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -73420,7 +73420,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, { fd, start, end, @@ -73431,7 +73431,7 @@ Other caches with similar key:`); } }))); } finally { - fs18.closeSync(fd); + fs19.closeSync(fd); } return; }); @@ -78675,7 +78675,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar4(require_io()); 
var fs_1 = require("fs"); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var constants_1 = require_constants10(); var IS_WINDOWS = process.platform === "win32"; @@ -78721,13 +78721,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path20.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path21.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -78773,7 +78773,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? 
[ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" @@ -78782,7 +78782,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path20.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path21.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -78797,7 +78797,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -78806,7 +78806,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path20.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path21.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -78846,7 +78846,7 @@ var require_tar = __commonJS({ exports2.extractTar = extractTar2; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter4(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path20.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path21.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -78916,7 +78916,7 @@ var require_cache3 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; var core14 = __importStar4(require_core()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var utils = __importStar4(require_cacheUtils()); var cacheHttpClient = __importStar4(require_cacheHttpClient()); var cacheTwirpClient = __importStar4(require_cacheTwirpClient()); @@ -79013,7 +79013,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path21.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core14.isDebug()) { @@ -79082,7 +79082,7 @@ var require_cache3 = __commonJS({ core14.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path20.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path21.join(yield 
utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core14.debug(`Archive path: ${archivePath}`); core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -79145,7 +79145,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path21.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -79209,7 +79209,7 @@ var require_cache3 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path20.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path21.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -79389,7 +79389,7 @@ var require_internal_path_helper2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.safeTrimTrailingSeparator = exports2.normalizeSeparators = exports2.hasRoot = exports2.hasAbsoluteRoot = exports2.ensureAbsoluteRoot = exports2.dirname = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname3(p) { @@ -79397,7 +79397,7 @@ var require_internal_path_helper2 = 
__commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path20.dirname(p); + let result = path21.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -79435,7 +79435,7 @@ var require_internal_path_helper2 = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path20.sep; + root += path21.sep; } return root + itemPath; } @@ -79473,10 +79473,10 @@ var require_internal_path_helper2 = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path20.sep)) { + if (!p.endsWith(path21.sep)) { return p; } - if (p === path20.sep) { + if (p === path21.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -79627,7 +79627,7 @@ var require_internal_path2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -79642,12 +79642,12 @@ var require_internal_path2 = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path20.sep); + this.segments = itemPath.split(path21.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename = path20.basename(remaining); + const basename = path21.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper.dirname(remaining); @@ -79665,7 +79665,7 @@ var require_internal_path2 = __commonJS({ (0, 
assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path20.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path21.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -79676,12 +79676,12 @@ var require_internal_path2 = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path20.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path21.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path20.sep; + result += path21.sep; } result += this.segments[i]; } @@ -79729,7 +79729,7 @@ var require_internal_pattern2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var pathHelper = __importStar4(require_internal_path_helper2()); var assert_1 = __importDefault4(require("assert")); var minimatch_1 = require_minimatch(); @@ -79758,7 +79758,7 @@ var require_internal_pattern2 = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir2); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path20.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path21.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -79782,8 +79782,8 @@ var require_internal_pattern2 = __commonJS({ 
match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path20.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path20.sep}`; + if (!itemPath.endsWith(path21.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path21.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -79818,9 +79818,9 @@ var require_internal_pattern2 = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path20.sep}`)) { + if (pattern === "." || pattern.startsWith(`.${path21.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path20.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path21.sep}`)) { homedir2 = homedir2 || os5.homedir(); (0, assert_1.default)(homedir2, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir2), `Expected HOME directory to be a rooted path. 
Actual '${homedir2}'`); @@ -79904,8 +79904,8 @@ var require_internal_search_state2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path20, level) { - this.path = path20; + constructor(path21, level) { + this.path = path21; this.level = level; } }; @@ -80029,9 +80029,9 @@ var require_internal_globber2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var globOptionsHelper = __importStar4(require_internal_glob_options_helper2()); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var patternHelper = __importStar4(require_internal_pattern_helper2()); var internal_match_kind_1 = require_internal_match_kind2(); var internal_pattern_1 = require_internal_pattern2(); @@ -80083,7 +80083,7 @@ var require_internal_globber2 = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await4(fs18.promises.lstat(searchPath)); + yield __await4(fs19.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -80107,7 +80107,7 @@ var require_internal_globber2 = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path20.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path21.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -80117,7 +80117,7 @@ var require_internal_globber2 = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await4(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path20.join(item.path, x), childLevel)); + const childItems = (yield 
__await4(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path21.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await4(item.path); @@ -80152,7 +80152,7 @@ var require_internal_globber2 = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield fs18.promises.stat(item.path); + stats = yield fs19.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -80164,10 +80164,10 @@ var require_internal_globber2 = __commonJS({ throw err; } } else { - stats = yield fs18.promises.lstat(item.path); + stats = yield fs19.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs18.promises.realpath(item.path); + const realPath = yield fs19.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -80266,10 +80266,10 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = void 0; var crypto2 = __importStar4(require("crypto")); var core14 = __importStar4(require_core()); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var stream2 = __importStar4(require("stream")); var util = __importStar4(require("util")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); function hashFiles2(globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; @@ -80285,17 +80285,17 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path20.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path21.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs18.statSync(file).isDirectory()) { + if (fs19.statSync(file).isDirectory()) { 
writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs18.createReadStream(file), hash); + yield pipeline(fs19.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) { @@ -80444,7 +80444,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os5 = require("os"); var cp = require("child_process"); - var fs18 = require("fs"); + var fs19 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter4(this, void 0, void 0, function* () { const platFilter = os5.platform(); @@ -80508,10 +80508,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs18.existsSync(lsbReleaseFile)) { - contents = fs18.readFileSync(lsbReleaseFile).toString(); - } else if (fs18.existsSync(osReleaseFile)) { - contents = fs18.readFileSync(osReleaseFile).toString(); + if (fs19.existsSync(lsbReleaseFile)) { + contents = fs19.readFileSync(lsbReleaseFile).toString(); + } else if (fs19.existsSync(osReleaseFile)) { + contents = fs19.readFileSync(osReleaseFile).toString(); } return contents; } @@ -80688,10 +80688,10 @@ var require_tool_cache = __commonJS({ var core14 = __importStar4(require_core()); var io7 = __importStar4(require_io()); var crypto2 = __importStar4(require("crypto")); - var fs18 = __importStar4(require("fs")); + var fs19 = __importStar4(require("fs")); var mm = __importStar4(require_manifest()); var os5 = __importStar4(require("os")); - var path20 = __importStar4(require("path")); + var path21 = __importStar4(require("path")); var httpm = __importStar4(require_lib()); var semver9 = __importStar4(require_semver2()); var stream2 = __importStar4(require("stream")); @@ -80712,8 +80712,8 @@ var require_tool_cache = __commonJS({ var userAgent = "actions/tool-cache"; function downloadTool2(url, dest, 
auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - dest = dest || path20.join(_getTempDirectory(), crypto2.randomUUID()); - yield io7.mkdirP(path20.dirname(dest)); + dest = dest || path21.join(_getTempDirectory(), crypto2.randomUUID()); + yield io7.mkdirP(path21.dirname(dest)); core14.debug(`Downloading ${url}`); core14.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -80735,7 +80735,7 @@ var require_tool_cache = __commonJS({ exports2.downloadTool = downloadTool2; function downloadToolAttempt(url, dest, auth, headers) { return __awaiter4(this, void 0, void 0, function* () { - if (fs18.existsSync(dest)) { + if (fs19.existsSync(dest)) { throw new Error(`Destination file path ${dest} already exists`); } const http = new httpm.HttpClient(userAgent, [], { @@ -80759,7 +80759,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs18.createWriteStream(dest)); + yield pipeline(readStream, fs19.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -80800,7 +80800,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path20.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path21.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -80971,12 +80971,12 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs18.statSync(sourceDir).isDirectory()) { + if (!fs19.statSync(sourceDir).isDirectory()) { 
throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs18.readdirSync(sourceDir)) { - const s = path20.join(sourceDir, itemName); + for (const itemName of fs19.readdirSync(sourceDir)) { + const s = path21.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch2); @@ -80990,11 +80990,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source file: ${sourceFile}`); - if (!fs18.statSync(sourceFile).isFile()) { + if (!fs19.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path20.join(destFolder, targetFile); + const destPath = path21.join(destFolder, targetFile); core14.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); @@ -81018,9 +81018,9 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path20.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path21.join(_getCacheDirectory(), toolName, versionSpec, arch2); core14.debug(`checking cache: ${cachePath}`); - if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { + if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -81033,13 +81033,13 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch2) { const versions = []; arch2 = arch2 || os5.arch(); - const toolPath = path20.join(_getCacheDirectory(), toolName); - if (fs18.existsSync(toolPath)) { - const children = fs18.readdirSync(toolPath); + const toolPath = 
path21.join(_getCacheDirectory(), toolName); + if (fs19.existsSync(toolPath)) { + const children = fs19.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path20.join(toolPath, child, arch2 || ""); - if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) { + const fullPath = path21.join(toolPath, child, arch2 || ""); + if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -81093,7 +81093,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter4(this, void 0, void 0, function* () { if (!dest) { - dest = path20.join(_getTempDirectory(), crypto2.randomUUID()); + dest = path21.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -81101,7 +81101,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch2) { return __awaiter4(this, void 0, void 0, function* () { - const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path21.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -81111,9 +81111,9 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch2) { - const folderPath = path20.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path21.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs18.writeFileSync(markerPath, ""); + fs19.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -81721,8 +81721,8 @@ var require_follow_redirects = __commonJS({ }); // src/init-action.ts -var fs17 = 
__toESM(require("fs")); -var path19 = __toESM(require("path")); +var fs18 = __toESM(require("fs")); +var path20 = __toESM(require("path")); var core13 = __toESM(require_core()); var io6 = __toESM(require_io()); var semver8 = __toESM(require_semver2()); @@ -82178,12 +82178,12 @@ var import_fast_glob = __toESM(require_out4(), 1); var import_ignore = __toESM(require_ignore(), 1); // node_modules/slash/index.js -function slash(path20) { - const isExtendedLengthPath = path20.startsWith("\\\\?\\"); +function slash(path21) { + const isExtendedLengthPath = path21.startsWith("\\\\?\\"); if (isExtendedLengthPath) { - return path20; + return path21; } - return path20.replace(/\\/g, "/"); + return path21.replace(/\\/g, "/"); } // node_modules/globby/utilities.js @@ -82271,8 +82271,8 @@ var assertPatternsInput = (patterns) => { } }; var normalizePathForDirectoryGlob = (filePath, cwd) => { - const path20 = isNegativePattern(filePath) ? filePath.slice(1) : filePath; - return import_node_path3.default.isAbsolute(path20) ? path20 : import_node_path3.default.join(cwd, path20); + const path21 = isNegativePattern(filePath) ? filePath.slice(1) : filePath; + return import_node_path3.default.isAbsolute(path21) ? path21 : import_node_path3.default.join(cwd, path21); }; var getDirectoryGlob = ({ directoryPath, files, extensions }) => { const extensionGlob = extensions?.length > 0 ? `.${extensions.length > 1 ? 
`{${extensions.join(",")}}` : extensions[0]}` : ""; @@ -82671,21 +82671,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs18 = options.fs || await import("node:fs/promises"); + const fs19 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); + const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error2) => errors.push(error2)); + const directoryItems = returnType.strict ? 
await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error2) => errors.push(error2)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -86228,8 +86228,8 @@ function getDependencyCachingEnabled() { } // src/config-utils.ts -var fs9 = __toESM(require("fs")); -var path11 = __toESM(require("path")); +var fs10 = __toESM(require("fs")); +var path12 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); // src/analyses.ts @@ -86561,6 +86561,10 @@ function generateCodeScanningConfig(logger, originalUserInput, augmentationPrope return augmentedConfig; } +// src/diff-informed-analysis-utils.ts +var fs8 = __toESM(require("fs")); +var path10 = __toESM(require("path")); + // src/feature-flags.ts var fs7 = __toESM(require("fs")); var path9 = __toESM(require("path")); @@ -86677,8 +86681,8 @@ var getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path20 = decodeGitFilePath(match[2]); - fileOidMap[path20] = oid; + const path21 = decodeGitFilePath(match[2]); + fileOidMap[path21] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -86804,12 +86808,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path8.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if 
(!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs6.existsSync(path8.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path8.join( @@ -87439,6 +87472,129 @@ async function getDiffInformedAnalysisBranches(codeql, features, logger) { } return branches; } +function getDiffRangesJsonFilePath() { + return path10.join(getTemporaryDirectory(), "pr-diff-range.json"); +} +function writeDiffRangesJsonFile(logger, ranges) { + const jsonContents = JSON.stringify(ranges, null, 2); + const jsonFilePath = getDiffRangesJsonFilePath(); + fs8.writeFileSync(jsonFilePath, jsonContents); + logger.debug( + `Wrote pr-diff-range JSON file to ${jsonFilePath}: +${jsonContents}` + ); +} +async function getPullRequestEditedDiffRanges(branches, logger) { + const fileDiffs = await getFileDiffsWithBasehead(branches, logger); + if (fileDiffs === void 0) { + return void 0; + } + if (fileDiffs.length >= 300) { + logger.warning( + `Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.` + ); + return void 0; + } + const results = []; + for (const filediff of fileDiffs) { + const diffRanges = getDiffRanges(filediff, logger); + if (diffRanges === void 0) { + 
return void 0; + } + results.push(...diffRanges); + } + return results; +} +async function getFileDiffsWithBasehead(branches, logger) { + const repositoryNwo = getRepositoryNwoFromEnv( + "CODE_SCANNING_REPOSITORY", + "GITHUB_REPOSITORY" + ); + const basehead = `${branches.base}...${branches.head}`; + try { + const response = await getApiClient().rest.repos.compareCommitsWithBasehead( + { + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + basehead, + per_page: 1 + } + ); + logger.debug( + `Response from compareCommitsWithBasehead(${basehead}): +${JSON.stringify(response, null, 2)}` + ); + return response.data.files; + } catch (error2) { + if (error2.status) { + logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`); + logger.debug( + `Error running compareCommitsWithBasehead(${basehead}): +Request: ${JSON.stringify(error2.request, null, 2)} +Error Response: ${JSON.stringify(error2.response, null, 2)}` + ); + return void 0; + } else { + throw error2; + } + } +} +function getDiffRanges(fileDiff, logger) { + const filename = path10.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path10.sep, "/"); + if (fileDiff.patch === void 0) { + if (fileDiff.changes === 0) { + return []; + } + return [ + { + path: filename, + startLine: 0, + endLine: 0 + } + ]; + } + let currentLine = 0; + let additionRangeStartLine = void 0; + const diffRanges = []; + const diffLines = fileDiff.patch.split("\n"); + diffLines.push(" "); + for (const diffLine of diffLines) { + if (diffLine.startsWith("-")) { + continue; + } + if (diffLine.startsWith("+")) { + if (additionRangeStartLine === void 0) { + additionRangeStartLine = currentLine; + } + currentLine++; + continue; + } + if (additionRangeStartLine !== void 0) { + diffRanges.push({ + path: filename, + startLine: additionRangeStartLine, + endLine: currentLine - 1 + }); + additionRangeStartLine = void 0; + } + if (diffLine.startsWith("@@ ")) { + const match = diffLine.match(/^@@ -\d+(?:,\d+)? 
\+(\d+)(?:,\d+)? @@/); + if (match === null) { + logger.warning( + `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}` + ); + return void 0; + } + currentLine = parseInt(match[1], 10); + continue; + } + if (diffLine.startsWith(" ")) { + currentLine++; + continue; + } + } + return diffRanges; +} // src/languages.ts var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { @@ -87456,8 +87612,8 @@ var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { })(KnownLanguage || {}); // src/trap-caching.ts -var fs8 = __toESM(require("fs")); -var path10 = __toESM(require("path")); +var fs9 = __toESM(require("fs")); +var path11 = __toESM(require("path")); var actionsCache2 = __toESM(require_cache3()); var CACHE_VERSION2 = 1; var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap"; @@ -87473,13 +87629,13 @@ async function downloadTrapCaches(codeql, languages, logger) { `Found ${languagesSupportingCaching.length} languages that support TRAP caching` ); if (languagesSupportingCaching.length === 0) return result; - const cachesDir = path10.join( + const cachesDir = path11.join( getTemporaryDirectory(), "trapCaches" ); for (const language of languagesSupportingCaching) { - const cacheDir = path10.join(cachesDir, language); - fs8.mkdirSync(cacheDir, { recursive: true }); + const cacheDir = path11.join(cachesDir, language); + fs9.mkdirSync(cacheDir, { recursive: true }); result[language] = cacheDir; } if (await isAnalyzingDefaultBranch()) { @@ -87491,7 +87647,7 @@ async function downloadTrapCaches(codeql, languages, logger) { let baseSha = "unknown"; const eventPath = process.env.GITHUB_EVENT_PATH; if (getWorkflowEventName() === "pull_request" && eventPath !== void 0) { - const event = JSON.parse(fs8.readFileSync(path10.resolve(eventPath), "utf-8")); + const event = JSON.parse(fs9.readFileSync(path11.resolve(eventPath), "utf-8")); baseSha = event.pull_request?.base?.sha || baseSha; } for (const language of languages) { @@ -87593,9 +87749,9 @@ async function 
getSupportedLanguageMap(codeql, features, logger) { } var baseWorkflowsPath = ".github/workflows"; function hasActionsWorkflows(sourceRoot) { - const workflowsPath = path11.resolve(sourceRoot, baseWorkflowsPath); - const stats = fs9.lstatSync(workflowsPath, { throwIfNoEntry: false }); - return stats !== void 0 && stats.isDirectory() && fs9.readdirSync(workflowsPath).length > 0; + const workflowsPath = path12.resolve(sourceRoot, baseWorkflowsPath); + const stats = fs10.lstatSync(workflowsPath, { throwIfNoEntry: false }); + return stats !== void 0 && stats.isDirectory() && fs10.readdirSync(workflowsPath).length > 0; } async function getRawLanguagesInRepo(repository, sourceRoot, logger) { logger.debug( @@ -87765,8 +87921,8 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg async function loadUserConfig(configFile, workspacePath, apiDetails, tempDir) { if (isLocal(configFile)) { if (configFile !== userConfigFromActionPath(tempDir)) { - configFile = path11.resolve(workspacePath, configFile); - if (!(configFile + path11.sep).startsWith(workspacePath + path11.sep)) { + configFile = path12.resolve(workspacePath, configFile); + if (!(configFile + path12.sep).startsWith(workspacePath + path12.sep)) { throw new ConfigurationError( getConfigFileOutsideWorkspaceErrorMessage(configFile) ); @@ -87895,10 +88051,10 @@ async function getOverlayDatabaseMode(codeql, repository, features, languages, s }; } function dbLocationOrDefault(dbLocation, tempDir) { - return dbLocation || path11.resolve(tempDir, "codeql_databases"); + return dbLocation || path12.resolve(tempDir, "codeql_databases"); } function userConfigFromActionPath(tempDir) { - return path11.resolve(tempDir, "user-config-from-action.yml"); + return path12.resolve(tempDir, "user-config-from-action.yml"); } function hasQueryCustomisation(userConfig) { return isDefined(userConfig["disable-default-queries"]) || isDefined(userConfig.queries) || isDefined(userConfig["query-filters"]); @@ -87912,7 
+88068,7 @@ async function initConfig(inputs) { ); } inputs.configFile = userConfigFromActionPath(tempDir); - fs9.writeFileSync(inputs.configFile, inputs.configInput); + fs10.writeFileSync(inputs.configFile, inputs.configInput); logger.debug(`Using config from action input: ${inputs.configFile}`); } let userConfig = {}; @@ -87987,12 +88143,12 @@ function isLocal(configPath) { return configPath.indexOf("@") === -1; } function getLocalConfig(configFile) { - if (!fs9.existsSync(configFile)) { + if (!fs10.existsSync(configFile)) { throw new ConfigurationError( getConfigFileDoesNotExistErrorMessage(configFile) ); } - return load(fs9.readFileSync(configFile, "utf8")); + return load(fs10.readFileSync(configFile, "utf8")); } async function getRemoteConfig(configFile, apiDetails) { const format = new RegExp( @@ -88027,13 +88183,13 @@ async function getRemoteConfig(configFile, apiDetails) { ); } function getPathToParsedConfigFile(tempDir) { - return path11.join(tempDir, "config"); + return path12.join(tempDir, "config"); } async function saveConfig(config, logger) { const configString = JSON.stringify(config); const configFile = getPathToParsedConfigFile(config.tempDir); - fs9.mkdirSync(path11.dirname(configFile), { recursive: true }); - fs9.writeFileSync(configFile, configString, "utf8"); + fs10.mkdirSync(path12.dirname(configFile), { recursive: true }); + fs10.writeFileSync(configFile, configString, "utf8"); logger.debug("Saved config:"); logger.debug(configString); } @@ -88043,9 +88199,9 @@ async function generateRegistries(registriesInput, tempDir, logger) { let qlconfigFile; if (registries) { const qlconfig = createRegistriesBlock(registries); - qlconfigFile = path11.join(tempDir, "qlconfig.yml"); + qlconfigFile = path12.join(tempDir, "qlconfig.yml"); const qlconfigContents = dump(qlconfig); - fs9.writeFileSync(qlconfigFile, qlconfigContents, "utf8"); + fs10.writeFileSync(qlconfigFile, qlconfigContents, "utf8"); logger.debug("Generated qlconfig.yml:"); 
logger.debug(qlconfigContents); registriesAuthTokens = registries.map((registry) => `${registry.url}=${registry.token}`).join(","); @@ -88323,14 +88479,14 @@ function flushDiagnostics(config) { } // src/init.ts -var fs15 = __toESM(require("fs")); -var path17 = __toESM(require("path")); +var fs16 = __toESM(require("fs")); +var path18 = __toESM(require("path")); var toolrunner4 = __toESM(require_toolrunner()); var io5 = __toESM(require_io()); // src/codeql.ts -var fs14 = __toESM(require("fs")); -var path16 = __toESM(require("path")); +var fs15 = __toESM(require("fs")); +var path17 = __toESM(require("path")); var core10 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -88611,15 +88767,15 @@ function wrapCliConfigurationError(cliError) { } // src/setup-codeql.ts -var fs12 = __toESM(require("fs")); -var path14 = __toESM(require("path")); +var fs13 = __toESM(require("fs")); +var path15 = __toESM(require("path")); var toolcache3 = __toESM(require_tool_cache()); var import_fast_deep_equal = __toESM(require_fast_deep_equal()); var semver7 = __toESM(require_semver2()); // src/tar.ts var import_child_process = require("child_process"); -var fs10 = __toESM(require("fs")); +var fs11 = __toESM(require("fs")); var stream = __toESM(require("stream")); var import_toolrunner = __toESM(require_toolrunner()); var io4 = __toESM(require_io()); @@ -88692,7 +88848,7 @@ async function isZstdAvailable(logger) { } } async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs10.mkdirSync(dest, { recursive: true }); + fs11.mkdirSync(dest, { recursive: true }); switch (compressionMethod) { case "gzip": return await toolcache.extractTar(tarPath, dest); @@ -88776,9 +88932,9 @@ function inferCompressionMethod(tarPath) { } // src/tools-download.ts -var fs11 = __toESM(require("fs")); +var fs12 = __toESM(require("fs")); var os3 = __toESM(require("os")); -var path13 = __toESM(require("path")); +var path14 = __toESM(require("path")); var 
import_perf_hooks2 = require("perf_hooks"); var core9 = __toESM(require_core()); var import_http_client = __toESM(require_lib()); @@ -88883,7 +89039,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs11.mkdirSync(dest, { recursive: true }); + fs12.mkdirSync(dest, { recursive: true }); const agent = new import_http_client.HttpClient().getAgent(codeqlURL); headers = Object.assign( { "User-Agent": "CodeQL Action" }, @@ -88911,7 +89067,7 @@ async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorizatio await extractTarZst(response, dest, tarVersion, logger); } function getToolcacheDirectory(version) { - return path13.join( + return path14.join( getRequiredEnvParam("RUNNER_TOOL_CACHE"), TOOLCACHE_TOOL_NAME, semver6.clean(version) || version, @@ -88920,7 +89076,7 @@ function getToolcacheDirectory(version) { } function writeToolcacheMarkerFile(extractedPath, logger) { const markerFilePath = `${extractedPath}.complete`; - fs11.writeFileSync(markerFilePath, ""); + fs12.writeFileSync(markerFilePath, ""); logger.info(`Created toolcache marker file ${markerFilePath}`); } function sanitizeUrlForStatusReport(url) { @@ -89055,7 +89211,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) { const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ folder: toolcache3.find("CodeQL", version), version - })).filter(({ folder }) => fs12.existsSync(path14.join(folder, "pinned-version"))); + })).filter(({ folder }) => fs13.existsSync(path15.join(folder, "pinned-version"))); if (candidates.length === 1) { const candidate = candidates[0]; logger.debug( @@ -89418,7 +89574,7 @@ async function useZstdBundle(cliVersion2, tarSupportsZstd) { ); } function getTempExtractionDir(tempDir) { - return path14.join(tempDir, v4_default()); + return path15.join(tempDir, 
v4_default()); } async function getNightlyToolsUrl(logger) { const zstdAvailability = await isZstdAvailable(logger); @@ -89466,8 +89622,8 @@ function isReservedToolsValue(tools) { } // src/tracer-config.ts -var fs13 = __toESM(require("fs")); -var path15 = __toESM(require("path")); +var fs14 = __toESM(require("fs")); +var path16 = __toESM(require("path")); async function shouldEnableIndirectTracing(codeql, config) { if (config.buildMode === "none" /* None */) { return false; @@ -89479,8 +89635,8 @@ async function shouldEnableIndirectTracing(codeql, config) { } async function getTracerConfigForCluster(config) { const tracingEnvVariables = JSON.parse( - fs13.readFileSync( - path15.resolve( + fs14.readFileSync( + path16.resolve( config.dbLocation, "temp/tracingEnvironment/start-tracing.json" ), @@ -89527,7 +89683,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV toolsDownloadStatusReport )}` ); - let codeqlCmd = path16.join(codeqlFolder, "codeql", "codeql"); + let codeqlCmd = path17.join(codeqlFolder, "codeql", "codeql"); if (process.platform === "win32") { codeqlCmd += ".exe"; } else if (process.platform !== "linux" && process.platform !== "darwin") { @@ -89583,17 +89739,17 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async isTracedLanguage(language) { const extractorPath = await this.resolveExtractor(language); - const tracingConfigPath = path16.join( + const tracingConfigPath = path17.join( extractorPath, "tools", "tracing-config.lua" ); - return fs14.existsSync(tracingConfigPath); + return fs15.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -89628,6 +89784,7 @@ async function 
getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); @@ -89659,7 +89816,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { }, async runAutobuild(config, language) { applyAutobuildAzurePipelinesTimeoutFix(); - const autobuildCmd = path16.join( + const autobuildCmd = path17.join( await this.resolveExtractor(language), "tools", process.platform === "win32" ? "autobuild.cmd" : "autobuild.sh" @@ -90049,7 +90206,7 @@ async function writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs14.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs15.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -90072,7 +90229,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) { ]; } function getGeneratedCodeScanningConfigPath(config) { - return path16.resolve(config.tempDir, "user-config.yaml"); + return path17.resolve(config.tempDir, "user-config.yaml"); } function getExtractionVerbosityArguments(enableDebugLogging) { return enableDebugLogging ? 
[`--verbosity=${EXTRACTION_DEBUG_MODE_VERBOSITY}`] : []; @@ -90125,8 +90282,8 @@ async function initConfig2(inputs) { return await initConfig(inputs); }); } -async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) { - fs15.mkdirSync(config.dbLocation, { recursive: true }); +async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { + fs16.mkdirSync(config.dbLocation, { recursive: true }); await wrapEnvironment( databaseInitEnvironment, async () => await codeql.databaseInitCluster( @@ -90134,6 +90291,7 @@ async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, s sourceRoot, processName, qlconfigFile, + prDiffChangedFiles, logger ) ); @@ -90161,25 +90319,25 @@ async function checkPacksForOverlayCompatibility(codeql, config, logger) { } function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) { try { - let qlpackPath = path17.join(packDir, "qlpack.yml"); - if (!fs15.existsSync(qlpackPath)) { - qlpackPath = path17.join(packDir, "codeql-pack.yml"); + let qlpackPath = path18.join(packDir, "qlpack.yml"); + if (!fs16.existsSync(qlpackPath)) { + qlpackPath = path18.join(packDir, "codeql-pack.yml"); } const qlpackContents = load( - fs15.readFileSync(qlpackPath, "utf8") + fs16.readFileSync(qlpackPath, "utf8") ); if (!qlpackContents.buildMetadata) { return true; } - const packInfoPath = path17.join(packDir, ".packinfo"); - if (!fs15.existsSync(packInfoPath)) { + const packInfoPath = path18.join(packDir, ".packinfo"); + if (!fs16.existsSync(packInfoPath)) { logger.warning( `The query pack at ${packDir} does not have a .packinfo file, so it cannot support overlay analysis. 
Recompiling the query pack with the latest CodeQL CLI should solve this problem.` ); return false; } const packInfoFileContents = JSON.parse( - fs15.readFileSync(packInfoPath, "utf8") + fs16.readFileSync(packInfoPath, "utf8") ); const packOverlayVersion = packInfoFileContents.overlayVersion; if (typeof packOverlayVersion !== "number") { @@ -90204,7 +90362,7 @@ function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) } async function checkInstallPython311(languages, codeql) { if (languages.includes("python" /* python */) && process.platform === "win32" && !(await codeql.getVersion()).features?.supportsPython312) { - const script = path17.resolve( + const script = path18.resolve( __dirname, "../python-setup", "check_python12.ps1" @@ -90214,8 +90372,8 @@ async function checkInstallPython311(languages, codeql) { ]).exec(); } } -function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs15.rmSync) { - if (fs15.existsSync(config.dbLocation) && (fs15.statSync(config.dbLocation).isFile() || fs15.readdirSync(config.dbLocation).length > 0)) { +function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs16.rmSync) { + if (fs16.existsSync(config.dbLocation) && (fs16.statSync(config.dbLocation).isFile() || fs16.readdirSync(config.dbLocation).length > 0)) { if (!options.disableExistingDirectoryWarning) { logger.warning( `The database cluster directory ${config.dbLocation} must be empty. 
Attempting to clean it up.` @@ -90472,8 +90630,8 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config } // src/workflow.ts -var fs16 = __toESM(require("fs")); -var path18 = __toESM(require("path")); +var fs17 = __toESM(require("fs")); +var path19 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); var core12 = __toESM(require_core()); function toCodedErrors(errors) { @@ -90624,15 +90782,15 @@ async function getWorkflow(logger) { ); } const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs16.readFileSync(workflowPath, "utf-8")); + return load(fs17.readFileSync(workflowPath, "utf-8")); } async function getWorkflowAbsolutePath(logger) { const relativePath = await getWorkflowRelativePath(); - const absolutePath = path18.join( + const absolutePath = path19.join( getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath ); - if (fs16.existsSync(absolutePath)) { + if (fs17.existsSync(absolutePath)) { logger.debug( `Derived the following absolute path for the currently executing workflow: ${absolutePath}.` ); @@ -90698,6 +90856,7 @@ async function run() { initializeEnvironment(getActionVersion()); persistInputs(); let config; + let prDiffChangedFiles; let codeql; let toolsDownloadStatusReport; let toolsFeatureFlagsValid; @@ -90729,7 +90888,7 @@ async function run() { core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); const configFile = getOptionalInput("config-file"); - const sourceRoot = path19.resolve( + const sourceRoot = path20.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), getOptionalInput("source-root") || "" ); @@ -90827,6 +90986,11 @@ async function run() { logger }); await checkInstallPython311(config.languages, codeql); + prDiffChangedFiles = await computeAndPersistDiffRanges( + codeql, + features, + logger + ); } catch (unwrappedError) { const error2 = wrapError(unwrappedError); 
core13.setFailed(error2.message); @@ -90908,21 +91072,21 @@ async function run() { )) { try { logger.debug(`Applying static binary workaround for Go`); - const tempBinPath = path19.resolve( + const tempBinPath = path20.resolve( getTemporaryDirectory(), "codeql-action-go-tracing", "bin" ); - fs17.mkdirSync(tempBinPath, { recursive: true }); + fs18.mkdirSync(tempBinPath, { recursive: true }); core13.addPath(tempBinPath); - const goWrapperPath = path19.resolve(tempBinPath, "go"); - fs17.writeFileSync( + const goWrapperPath = path20.resolve(tempBinPath, "go"); + fs18.writeFileSync( goWrapperPath, `#!/bin/bash exec ${goBinaryPath} "$@"` ); - fs17.chmodSync(goWrapperPath, "755"); + fs18.chmodSync(goWrapperPath, "755"); core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( @@ -91054,6 +91218,7 @@ exec ${goBinaryPath} "$@"` sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger ); if (config.overlayDatabaseMode !== "none" /* None */ && !await checkPacksForOverlayCompatibility(codeql, config, logger)) { @@ -91071,6 +91236,7 @@ exec ${goBinaryPath} "$@"` sourceRoot, "Runner.Worker.exe", qlconfigFile, + prDiffChangedFiles, logger ); } @@ -91118,6 +91284,35 @@ exec ${goBinaryPath} "$@"` logger ); } +async function computeAndPersistDiffRanges(codeql, features, logger) { + try { + return await withGroupAsync("Compute PR diff ranges", async () => { + const branches = await getDiffInformedAnalysisBranches( + codeql, + features, + logger + ); + if (!branches) { + return void 0; + } + const ranges = await getPullRequestEditedDiffRanges(branches, logger); + if (ranges === void 0) { + return void 0; + } + writeDiffRangesJsonFile(logger, ranges); + const distinctFiles = new Set(ranges.map((r) => r.path)); + logger.info( + `Persisted ${ranges.length} diff range(s) across ${distinctFiles.size} file(s) for reuse during analyze step.` + ); + return distinctFiles; + }); + } catch (e) { + logger.warning( + 
`Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}` + ); + return void 0; + } +} function getTrapCachingEnabled() { const trapCaching = getOptionalInput("trap-caching"); if (trapCaching !== void 0) return trapCaching === "true"; diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index 4d1cdf81af..e3b1867aac 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -78494,12 +78494,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path2.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path2.join( @@ -78870,7 +78899,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -78905,6 +78934,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/upload-lib.js b/lib/upload-lib.js index e7c0bb5ecd..1ab41cd3d7 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -89289,12 +89289,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const 
existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -90615,7 +90644,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -90650,6 +90679,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index ecdb515e3f..c1a10c1f2a 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -89204,12 +89204,41 @@ async function readBaseDatabaseOidsFile(config, logger) { function getBaseDatabaseOidsFilePath(config) { return 
path7.join(config.dbLocation, "base-database-oids.json"); } -async function writeOverlayChangesFile(config, sourceRoot, logger) { +async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) { const baseFileOids = await readBaseDatabaseOidsFile(config, logger); const overlayFileOids = await getFileOidsUnderPath(sourceRoot); const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids); + const originalCount = changedFiles.length; + let extraAddedCount = 0; + try { + if (prDiffChangedFiles && prDiffChangedFiles.size > 0) { + const existing = new Set(changedFiles); + for (const f of prDiffChangedFiles) { + if (!existing.has(f)) { + if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}` + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database." 
+ ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${e.message || e}` + ); + } logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.` + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.` ); const changedFilesJson = JSON.stringify({ changes: changedFiles }); const overlayChangesFile = path7.join( @@ -91287,7 +91316,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { async isScannedLanguage(language) { return !await this.isTracedLanguage(language); }, - async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) { + async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -91322,6 +91351,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + prDiffChangedFiles, logger ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/src/analyze-action.ts b/src/analyze-action.ts index 3d0fb1c89e..864063af89 100644 --- a/src/analyze-action.ts +++ b/src/analyze-action.ts @@ -30,7 +30,6 @@ import { DependencyCacheUploadStatusReport, uploadDependencyCaches, } from "./dependency-caching"; -import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { Feature, Features } from "./feature-flags"; import { KnownLanguage } from "./languages"; @@ -299,14 +298,8 @@ async function run() { logger, ); - const branches = await getDiffInformedAnalysisBranches( - codeql, - features, - logger, - ); - const diffRangePackDir = branches - ? 
await setupDiffInformedQueryRun(branches, logger) - : undefined; + // Setup diff informed analysis if needed (based on whether init created the file) + const diffRangePackDir = await setupDiffInformedQueryRun(logger); await warnIfGoInstalledAfterInit(config, logger); await runAutobuildIfLegacyGoWorkflow(config, logger); diff --git a/src/analyze.test.ts b/src/analyze.test.ts index f3d516a78a..b6880b43da 100644 --- a/src/analyze.test.ts +++ b/src/analyze.test.ts @@ -4,10 +4,8 @@ import * as path from "path"; import test from "ava"; import * as sinon from "sinon"; -import * as actionsUtil from "./actions-util"; import { CodeQuality, CodeScanning } from "./analyses"; import { - exportedForTesting, runQueries, defaultSuites, resolveQuerySuiteAlias, @@ -131,204 +129,6 @@ test("status report fields", async (t) => { }); }); -function runGetDiffRanges(changes: number, patch: string[] | undefined): any { - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("checkout_path") - .returns("/checkout/path"); - return exportedForTesting.getDiffRanges( - { - filename: "test.txt", - changes, - patch: patch?.join("\n"), - }, - getRunnerLogger(true), - ); -} - -test("getDiffRanges: file unchanged", async (t) => { - const diffRanges = runGetDiffRanges(0, undefined); - t.deepEqual(diffRanges, []); -}); - -test("getDiffRanges: file diff too large", async (t) => { - const diffRanges = runGetDiffRanges(1000000, undefined); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 0, - endLine: 0, - }, - ]); -}); - -test("getDiffRanges: diff thunk with single addition range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,6 +50,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 54, - }, - ]); -}); - -test("getDiffRanges: diff thunk with single deletion range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ 
-30,8 +50,6 @@", - " a", - " b", - " c", - "-1", - "-2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, []); -}); - -test("getDiffRanges: diff thunk with single update range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,7 @@", - " a", - " b", - " c", - "-1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 53, - }, - ]); -}); - -test("getDiffRanges: diff thunk with addition ranges", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,9 @@", - " a", - " b", - " c", - "+1", - " c", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 53, - }, - { - path: "/checkout/path/test.txt", - startLine: 55, - endLine: 55, - }, - ]); -}); - -test("getDiffRanges: diff thunk with mixed ranges", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,7 +50,7 @@", - " a", - " b", - " c", - "-1", - " d", - "-2", - "+3", - " e", - " f", - "+4", - "+5", - " g", - " h", - " i", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 54, - endLine: 54, - }, - { - path: "/checkout/path/test.txt", - startLine: 57, - endLine: 58, - }, - ]); -}); - -test("getDiffRanges: multiple diff thunks", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,6 +50,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - "@@ -130,6 +150,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 54, - }, - { - path: "/checkout/path/test.txt", - startLine: 153, - endLine: 154, - }, - ]); -}); - -test("getDiffRanges: no diff context lines", async (t) => { - const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - 
startLine: 50, - endLine: 51, - }, - ]); -}); - -test("getDiffRanges: malformed thunk header", async (t) => { - const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]); - t.deepEqual(diffRanges, undefined); -}); - test("resolveQuerySuiteAlias", (t) => { // default query suite names should resolve to something language-specific ending in `.qls`. for (const suite of defaultSuites) { diff --git a/src/analyze.ts b/src/analyze.ts index b7eec921ac..1a5da2f1bf 100644 --- a/src/analyze.ts +++ b/src/analyze.ts @@ -6,13 +6,9 @@ import * as io from "@actions/io"; import * as del from "del"; import * as yaml from "js-yaml"; -import { - getRequiredInput, - getTemporaryDirectory, - PullRequestBranches, -} from "./actions-util"; +import { getTemporaryDirectory } from "./actions-util"; import * as analyses from "./analyses"; -import { getApiClient } from "./api-client"; +// (getApiClient import removed; no longer needed after diff refactor) import { setupCppAutobuild } from "./autobuild"; import { type CodeQL } from "./codeql"; import * as configUtils from "./config-utils"; @@ -20,14 +16,14 @@ import { getJavaTempDependencyDir } from "./dependency-caching"; import { addDiagnostic, makeDiagnostic } from "./diagnostics"; import { DiffThunkRange, - writeDiffRangesJsonFile, + readDiffRangesJsonFile, } from "./diff-informed-analysis-utils"; import { EnvVar } from "./environment"; import { FeatureEnablement, Feature } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; import { OverlayDatabaseMode } from "./overlay-database-utils"; -import { getRepositoryNwoFromEnv } from "./repository"; +// getRepositoryNwoFromEnv no longer needed after extracting diff logic import { DatabaseCreationTimings, EventReport } from "./status-report"; import { endTracingForCluster } from "./tracer-config"; import * as util from "./util"; @@ -287,16 +283,36 @@ async function finalizeDatabaseCreation( * the diff range 
information, or `undefined` if the feature is disabled. */ export async function setupDiffInformedQueryRun( - branches: PullRequestBranches, logger: Logger, ): Promise<string | undefined> { return await withGroupAsync( "Generating diff range extension pack", async () => { + // Only use precomputed diff ranges; never recompute here. + let diffRanges: DiffThunkRange[] | undefined; + try { + diffRanges = readDiffRangesJsonFile(logger); + } catch (e) { + logger.debug( + `Failed to read precomputed diff ranges: ${util.getErrorMessage(e)}`, + ); + diffRanges = undefined; + } + + if (diffRanges === undefined) { + logger.info( + "No precomputed diff ranges found; skipping diff-informed analysis stage.", + ); + return undefined; + } + + const fileCount = new Set( + diffRanges.filter((r) => r.path).map((r) => r.path), + ).size; logger.info( - `Calculating diff ranges for ${branches.base}...${branches.head}`, + `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).`, ); - const diffRanges = await getPullRequestEditedDiffRanges(branches, logger); + const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges); if (packDir === undefined) { logger.warning(
- */ -async function getPullRequestEditedDiffRanges( - branches: PullRequestBranches, - logger: Logger, -): Promise { - const fileDiffs = await getFileDiffsWithBasehead(branches, logger); - if (fileDiffs === undefined) { - return undefined; - } - if (fileDiffs.length >= 300) { - // The "compare two commits" API returns a maximum of 300 changed files. If - // we see that many changed files, it is possible that there could be more, - // with the rest being truncated. In this case, we should not attempt to - // compute the diff ranges, as the result would be incomplete. - logger.warning( - `Cannot retrieve the full diff because there are too many ` + - `(${fileDiffs.length}) changed files in the pull request.`, - ); - return undefined; - } - const results: DiffThunkRange[] = []; - for (const filediff of fileDiffs) { - const diffRanges = getDiffRanges(filediff, logger); - if (diffRanges === undefined) { - return undefined; - } - results.push(...diffRanges); - } - return results; -} - -/** - * This interface is an abbreviated version of the file diff object returned by - * the GitHub API. - */ -interface FileDiff { - filename: string; - changes: number; - // A patch may be absent if the file is binary, if the file diff is too large, - // or if the file is unchanged. - patch?: string | undefined; -} - -async function getFileDiffsWithBasehead( - branches: PullRequestBranches, - logger: Logger, -): Promise { - // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back - // to GITHUB_REPOSITORY. 
- const repositoryNwo = getRepositoryNwoFromEnv( - "CODE_SCANNING_REPOSITORY", - "GITHUB_REPOSITORY", - ); - const basehead = `${branches.base}...${branches.head}`; - try { - const response = await getApiClient().rest.repos.compareCommitsWithBasehead( - { - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - basehead, - per_page: 1, - }, - ); - logger.debug( - `Response from compareCommitsWithBasehead(${basehead}):` + - `\n${JSON.stringify(response, null, 2)}`, - ); - return response.data.files; - } catch (error: any) { - if (error.status) { - logger.warning(`Error retrieving diff ${basehead}: ${error.message}`); - logger.debug( - `Error running compareCommitsWithBasehead(${basehead}):` + - `\nRequest: ${JSON.stringify(error.request, null, 2)}` + - `\nError Response: ${JSON.stringify(error.response, null, 2)}`, - ); - return undefined; - } else { - throw error; - } - } -} - -function getDiffRanges( - fileDiff: FileDiff, - logger: Logger, -): DiffThunkRange[] | undefined { - // Diff-informed queries expect the file path to be absolute. CodeQL always - // uses forward slashes as the path separator, so on Windows we need to - // replace any backslashes with forward slashes. - const filename = path - .join(getRequiredInput("checkout_path"), fileDiff.filename) - .replaceAll(path.sep, "/"); - - if (fileDiff.patch === undefined) { - if (fileDiff.changes === 0) { - // There are situations where a changed file legitimately has no diff. - // For example, the file may be a binary file, or that the file may have - // been renamed with no changes to its contents. In these cases, the - // file would be reported as having 0 changes, and we can return an empty - // array to indicate no diff range in this file. - return []; - } - // If a file is reported to have nonzero changes but no patch, that may be - // due to the file diff being too large. In this case, we should fall back - // to a special diff range that covers the entire file. 
- return [ - { - path: filename, - startLine: 0, - endLine: 0, - }, - ]; - } - - // The 1-based file line number of the current line - let currentLine = 0; - // The 1-based file line number that starts the current range of added lines - let additionRangeStartLine: number | undefined = undefined; - const diffRanges: DiffThunkRange[] = []; - - const diffLines = fileDiff.patch.split("\n"); - // Adding a fake context line at the end ensures that the following loop will - // always terminate the last range of added lines. - diffLines.push(" "); - - for (const diffLine of diffLines) { - if (diffLine.startsWith("-")) { - // Ignore deletions completely -- we do not even want to consider them when - // calculating consecutive ranges of added lines. - continue; - } - if (diffLine.startsWith("+")) { - if (additionRangeStartLine === undefined) { - additionRangeStartLine = currentLine; - } - currentLine++; - continue; - } - if (additionRangeStartLine !== undefined) { - // Any line that does not start with a "+" or "-" terminates the current - // range of added lines. - diffRanges.push({ - path: filename, - startLine: additionRangeStartLine, - endLine: currentLine - 1, - }); - additionRangeStartLine = undefined; - } - if (diffLine.startsWith("@@ ")) { - // A new hunk header line resets the current line number. - const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); - if (match === null) { - logger.warning( - `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`, - ); - return undefined; - } - currentLine = parseInt(match[1], 10); - continue; - } - if (diffLine.startsWith(" ")) { - // An unchanged context line advances the current line number. - currentLine++; - continue; - } - } - return diffRanges; -} - /** * Create an extension pack in the temporary directory that contains the file * line ranges that were added or modified in the pull request. 
@@ -572,10 +409,6 @@ extensions: `Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`, ); - // Write the diff ranges to a JSON file, for action-side alert filtering by the - // upload-lib module. - writeDiffRangesJsonFile(logger, ranges); - return diffRangeDir; } @@ -923,6 +756,4 @@ export async function warnIfGoInstalledAfterInit( } } -export const exportedForTesting = { - getDiffRanges, -}; +export const exportedForTesting = {}; diff --git a/src/codeql.test.ts b/src/codeql.test.ts index a5422b1e38..57b9865f9f 100644 --- a/src/codeql.test.ts +++ b/src/codeql.test.ts @@ -517,6 +517,7 @@ const injectedConfigMacro = test.macro({ "", undefined, undefined, + undefined, getRunnerLogger(true), ); @@ -803,6 +804,7 @@ test("passes a code scanning config AND qlconfig to the CLI", async (t: Executio "", undefined, "/path/to/qlconfig.yml", + undefined, getRunnerLogger(true), ); @@ -831,6 +833,7 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu "", undefined, undefined, // undefined qlconfigFile + undefined, getRunnerLogger(true), ); @@ -1080,6 +1083,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP "sourceRoot", undefined, undefined, + undefined, getRunnerLogger(false), ); diff --git a/src/codeql.ts b/src/codeql.ts index 5241ff2476..c4b536ee4b 100644 --- a/src/codeql.ts +++ b/src/codeql.ts @@ -96,6 +96,7 @@ export interface CodeQL { sourceRoot: string, processName: string | undefined, qlconfigFile: string | undefined, + prDiffChangedFiles: Set<string> | undefined, logger: Logger, ): Promise<void>; /** @@ -560,6 +561,7 @@ export async function getCodeQLForCmd( sourceRoot: string, processName: string | undefined, qlconfigFile: string | undefined, + prDiffChangedFiles: Set<string> | undefined, logger: Logger, ) { const extraArgs = config.languages.map( (language) => `--language=${language}` ); @@ -602,6 +604,7 @@ export async function getCodeQLForCmd( const overlayChangesFile = await writeOverlayChangesFile( config, sourceRoot, + 
prDiffChangedFiles, logger, ); extraArgs.push(`--overlay-changes=${overlayChangesFile}`); diff --git a/src/diff-informed-analysis-utils.test.ts b/src/diff-informed-analysis-utils.test.ts index 1125f18fd0..a67b714b19 100644 --- a/src/diff-informed-analysis-utils.test.ts +++ b/src/diff-informed-analysis-utils.test.ts @@ -4,7 +4,10 @@ import * as sinon from "sinon"; import * as actionsUtil from "./actions-util"; import type { PullRequestBranches } from "./actions-util"; import * as apiClient from "./api-client"; -import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils"; +import { + shouldPerformDiffInformedAnalysis, + exportedForTesting as diffExportedForTesting, +} from "./diff-informed-analysis-utils"; import { Feature, Features } from "./feature-flags"; import { getRunnerLogger } from "./logging"; import { parseRepositoryNwo } from "./repository"; @@ -183,3 +186,204 @@ test( }, false, ); + +// --------------------------------------------------------------------------- +// Tests for getDiffRanges (moved from analyze.test.ts after extraction) +// --------------------------------------------------------------------------- +function runGetDiffRanges(changes: number, patch: string[] | undefined): any { + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("checkout_path") + .returns("/checkout/path"); + return diffExportedForTesting.getDiffRanges( + { + filename: "test.txt", + changes, + patch: patch?.join("\n"), + }, + getRunnerLogger(true), + ); +} + +test("getDiffRanges: file unchanged", async (t) => { + const diffRanges = runGetDiffRanges(0, undefined); + t.deepEqual(diffRanges, []); +}); + +test("getDiffRanges: file diff too large", async (t) => { + const diffRanges = runGetDiffRanges(1000000, undefined); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 0, + endLine: 0, + }, + ]); +}); + +test("getDiffRanges: diff thunk with single addition range", async (t) => { + const diffRanges = 
runGetDiffRanges(2, [ + "@@ -30,6 +50,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 54, + }, + ]); +}); + +test("getDiffRanges: diff thunk with single deletion range", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,8 +50,6 @@", + " a", + " b", + " c", + "-1", + "-2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, []); +}); + +test("getDiffRanges: diff thunk with single update range", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,7 @@", + " a", + " b", + " c", + "-1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 53, + }, + ]); +}); + +test("getDiffRanges: diff thunk with addition ranges", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,9 @@", + " a", + " b", + " c", + "+1", + " c", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 53, + }, + { + path: "/checkout/path/test.txt", + startLine: 55, + endLine: 55, + }, + ]); +}); + +test("getDiffRanges: diff thunk with mixed ranges", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,7 +50,7 @@", + " a", + " b", + " c", + "-1", + " d", + "-2", + "+3", + " e", + " f", + "+4", + "+5", + " g", + " h", + " i", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 54, + endLine: 54, + }, + { + path: "/checkout/path/test.txt", + startLine: 57, + endLine: 58, + }, + ]); +}); + +test("getDiffRanges: multiple diff thunks", async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,6 +50,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + "@@ -130,6 +150,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: 
"/checkout/path/test.txt", + startLine: 53, + endLine: 54, + }, + { + path: "/checkout/path/test.txt", + startLine: 153, + endLine: 154, + }, + ]); +}); + +test("getDiffRanges: no diff context lines", async (t) => { + const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 50, + endLine: 51, + }, + ]); +}); + +test("getDiffRanges: malformed thunk header", async (t) => { + const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]); + t.deepEqual(diffRanges, undefined); +}); diff --git a/src/diff-informed-analysis-utils.ts b/src/diff-informed-analysis-utils.ts index 7a23b3a295..9395f4e316 100644 --- a/src/diff-informed-analysis-utils.ts +++ b/src/diff-informed-analysis-utils.ts @@ -1,14 +1,28 @@ import * as fs from "fs"; import * as path from "path"; -import * as actionsUtil from "./actions-util"; import type { PullRequestBranches } from "./actions-util"; -import { getGitHubVersion } from "./api-client"; +import * as actionsUtil from "./actions-util"; +import { getRequiredInput } from "./actions-util"; +import { getGitHubVersion, getApiClient } from "./api-client"; import type { CodeQL } from "./codeql"; import { Feature, FeatureEnablement } from "./feature-flags"; import { Logger } from "./logging"; +import { getRepositoryNwoFromEnv } from "./repository"; import { GitHubVariant, satisfiesGHESVersion } from "./util"; +/** + * This interface is an abbreviated version of the file diff object returned by + * the GitHub API. (Kept internal to this module.) + */ +interface FileDiff { + filename: string; + changes: number; + // A patch may be absent if the file is binary, if the file diff is too large, + // or if the file is unchanged. + patch?: string | undefined; +} + /** * Check if the action should perform diff-informed analysis. 
*/ @@ -93,3 +107,176 @@ export function readDiffRangesJsonFile( ); return JSON.parse(jsonContents) as DiffThunkRange[]; } + +/** + * Return the file line ranges that were added or modified in the pull request. + * + * @param branches The base and head branches of the pull request. + * @param logger + * @returns An array of objects, where each object contains the absolute path of a + * file, the start line and the end line (both 1-based and inclusive) of an + * added or modified range in that file. Returns `undefined` if the action was + * not triggered by a pull request or if there was an error (including API + * truncation conditions). + */ +export async function getPullRequestEditedDiffRanges( + branches: PullRequestBranches, + logger: Logger, +): Promise<DiffThunkRange[] | undefined> { + const fileDiffs = await getFileDiffsWithBasehead(branches, logger); + if (fileDiffs === undefined) { + return undefined; + } + if (fileDiffs.length >= 300) { + // The "compare two commits" API returns a maximum of 300 changed files. If + // we see that many changed files, it is possible that there could be more, + // with the rest being truncated. In this case, we should not attempt to + // compute the diff ranges, as the result would be incomplete. + logger.warning( + `Cannot retrieve the full diff because there are too many ` + + `(${fileDiffs.length}) changed files in the pull request.`, + ); + return undefined; + } + const results: DiffThunkRange[] = []; + for (const filediff of fileDiffs) { + const diffRanges = getDiffRanges(filediff, logger); + if (diffRanges === undefined) { + return undefined; + } + results.push(...diffRanges); + } + return results; +} + +async function getFileDiffsWithBasehead( + branches: PullRequestBranches, + logger: Logger, +): Promise<FileDiff[] | undefined> { + // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back + // to GITHUB_REPOSITORY. 
+ const repositoryNwo = getRepositoryNwoFromEnv( + "CODE_SCANNING_REPOSITORY", + "GITHUB_REPOSITORY", + ); + const basehead = `${branches.base}...${branches.head}`; + try { + const response = await getApiClient().rest.repos.compareCommitsWithBasehead( + { + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + basehead, + per_page: 1, + }, + ); + logger.debug( + `Response from compareCommitsWithBasehead(${basehead}):` + + `\n${JSON.stringify(response, null, 2)}`, + ); + return response.data.files; + } catch (error: any) { + if (error.status) { + logger.warning(`Error retrieving diff ${basehead}: ${error.message}`); + logger.debug( + `Error running compareCommitsWithBasehead(${basehead}):` + + `\nRequest: ${JSON.stringify(error.request, null, 2)}` + + `\nError Response: ${JSON.stringify(error.response, null, 2)}`, + ); + return undefined; + } else { + throw error; + } + } +} + +function getDiffRanges( + fileDiff: FileDiff, + logger: Logger, +): DiffThunkRange[] | undefined { + // Diff-informed queries expect the file path to be absolute. CodeQL always + // uses forward slashes as the path separator, so on Windows we need to + // replace any backslashes with forward slashes. + const filename = path + .join(getRequiredInput("checkout_path"), fileDiff.filename) + .replaceAll(path.sep, "/"); + + if (fileDiff.patch === undefined) { + if (fileDiff.changes === 0) { + // There are situations where a changed file legitimately has no diff. + // For example, the file may be a binary file, or that the file may have + // been renamed with no changes to its contents. In these cases, the + // file would be reported as having 0 changes, and we can return an empty + // array to indicate no diff range in this file. + return []; + } + // If a file is reported to have nonzero changes but no patch, that may be + // due to the file diff being too large. In this case, we should fall back + // to a special diff range that covers the entire file. 
+ return [ + { + path: filename, + startLine: 0, + endLine: 0, + }, + ]; + } + + // The 1-based file line number of the current line + let currentLine = 0; + // The 1-based file line number that starts the current range of added lines + let additionRangeStartLine: number | undefined = undefined; + const diffRanges: DiffThunkRange[] = []; + + const diffLines = fileDiff.patch.split("\n"); + // Adding a fake context line at the end ensures that the following loop will + // always terminate the last range of added lines. + diffLines.push(" "); + + for (const diffLine of diffLines) { + if (diffLine.startsWith("-")) { + // Ignore deletions completely -- we do not even want to consider them when + // calculating consecutive ranges of added lines. + continue; + } + if (diffLine.startsWith("+")) { + if (additionRangeStartLine === undefined) { + additionRangeStartLine = currentLine; + } + currentLine++; + continue; + } + if (additionRangeStartLine !== undefined) { + // Any line that does not start with a "+" or "-" terminates the current + // range of added lines. + diffRanges.push({ + path: filename, + startLine: additionRangeStartLine, + endLine: currentLine - 1, + }); + additionRangeStartLine = undefined; + } + if (diffLine.startsWith("@@ ")) { + // A new hunk header line resets the current line number. + const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/); + if (match === null) { + logger.warning( + `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`, + ); + return undefined; + } + currentLine = parseInt(match[1], 10); + continue; + } + if (diffLine.startsWith(" ")) { + // An unchanged context line advances the current line number. 
+      currentLine++;
+      continue;
+    }
+  }
+  return diffRanges;
+}
+
+// Export internal helpers for unit testing only (kept stable for existing tests)
+export const exportedForTesting = {
+  getDiffRanges,
+};
diff --git a/src/init-action.ts b/src/init-action.ts
index 114ad6cab1..a481d28915 100644
--- a/src/init-action.ts
+++ b/src/init-action.ts
@@ -33,6 +33,11 @@ import {
   logUnwrittenDiagnostics,
   makeDiagnostic,
 } from "./diagnostics";
+import {
+  getPullRequestEditedDiffRanges,
+  writeDiffRangesJsonFile,
+  getDiffInformedAnalysisBranches,
+} from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { Feature, Features } from "./feature-flags";
 import { loadPropertiesFromApi } from "./feature-flags/properties";
@@ -45,7 +50,7 @@ import {
   runDatabaseInitCluster,
 } from "./init";
 import { KnownLanguage } from "./languages";
-import { getActionsLogger, Logger } from "./logging";
+import { getActionsLogger, Logger, withGroupAsync } from "./logging";
 import {
   downloadOverlayBaseDatabaseFromCache,
   OverlayBaseDatabaseDownloadStats,
@@ -175,6 +180,7 @@ async function run() {
   persistInputs();

   let config: configUtils.Config | undefined;
+  let prDiffChangedFiles: Set<string> | undefined;
   let codeql: CodeQL;
   let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined;
   let toolsFeatureFlagsValid: boolean | undefined;
@@ -336,6 +342,12 @@ async function run() {
     });

     await checkInstallPython311(config.languages, codeql);
+
+    prDiffChangedFiles = await computeAndPersistDiffRanges(
+      codeql,
+      features,
+      logger,
+    );
   } catch (unwrappedError) {
     const error = wrapError(unwrappedError);
     core.setFailed(error.message);
@@ -662,6 +674,7 @@ async function run() {
       sourceRoot,
       "Runner.Worker.exe",
       qlconfigFile,
+      prDiffChangedFiles,
       logger,
     );

@@ -691,6 +704,7 @@ async function run() {
         sourceRoot,
         "Runner.Worker.exe",
         qlconfigFile,
+        prDiffChangedFiles,
         logger,
       );
     }
@@ -748,6 +762,45 @@ async function run() {
   );
 }

+/**
+ * Compute and persist diff ranges early during init when diff-informed analysis
+ * is enabled (feature flag + PR context). This writes the standard pr-diff-range.json
+ * file for later reuse in the analyze step. Failures are logged but non-fatal.
+ */
+async function computeAndPersistDiffRanges(
+  codeql: CodeQL,
+  features: Features,
+  logger: Logger,
+): Promise<Set<string> | undefined> {
+  try {
+    return await withGroupAsync("Compute PR diff ranges", async () => {
+      const branches = await getDiffInformedAnalysisBranches(
+        codeql,
+        features,
+        logger,
+      );
+      if (!branches) {
+        return undefined;
+      }
+      const ranges = await getPullRequestEditedDiffRanges(branches, logger);
+      if (ranges === undefined) {
+        return undefined;
+      }
+      writeDiffRangesJsonFile(logger, ranges);
+      const distinctFiles = new Set(ranges.map((r) => r.path));
+      logger.info(
+        `Persisted ${ranges.length} diff range(s) across ${distinctFiles.size} file(s) for reuse during analyze step.`,
+      );
+      return distinctFiles;
+    });
+  } catch (e) {
+    logger.warning(
+      `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}`,
+    );
+    return undefined;
+  }
+}
+
 function getTrapCachingEnabled(): boolean {
   // If the workflow specified something always respect that
   const trapCaching = getOptionalInput("trap-caching");
diff --git a/src/init.ts b/src/init.ts
index 687afc1227..8bb4f963e1 100644
--- a/src/init.ts
+++ b/src/init.ts
@@ -73,6 +73,7 @@ export async function runDatabaseInitCluster(
   sourceRoot: string,
   processName: string | undefined,
   qlconfigFile: string | undefined,
+  prDiffChangedFiles: Set<string> | undefined,
   logger: Logger,
 ): Promise<void> {
   fs.mkdirSync(config.dbLocation, { recursive: true });
@@ -84,6 +85,7 @@
       sourceRoot,
       processName,
       qlconfigFile,
+      prDiffChangedFiles,
       logger,
     ),
   );
diff --git a/src/overlay-database-utils.test.ts b/src/overlay-database-utils.test.ts
index ca52f1d88a..b7b63340fd 100644
--- a/src/overlay-database-utils.test.ts
+++ b/src/overlay-database-utils.test.ts
@@ -69,6 +69,7 @@ test("writeOverlayChangesFile generates correct changes file", async (t) => {
   const changesFilePath = await writeOverlayChangesFile(
     config,
     sourceRoot,
+    new Set([]), // The PR didn't touch any files
     logger,
   );
   getFileOidsStubForOverlay.restore();
diff --git a/src/overlay-database-utils.ts b/src/overlay-database-utils.ts
index 1de76fef77..b4a894a124 100644
--- a/src/overlay-database-utils.ts
+++ b/src/overlay-database-utils.ts
@@ -116,13 +116,51 @@ function getBaseDatabaseOidsFilePath(config: Config): string {
 export async function writeOverlayChangesFile(
   config: Config,
   sourceRoot: string,
+  prDiffChangedFiles: Set<string> | undefined,
   logger: Logger,
 ): Promise<string> {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+
+  // Augment changed files with any files that appear in the precomputed PR diff ranges.
+  // This ensures overlay analysis always includes every file with at least one edited range.
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          // Only include if file still exists (added/modified) — skip deleted files that might appear in diff.
+ if ( + overlayFileOids[f] !== undefined || + fs.existsSync(path.join(sourceRoot, f)) + ) { + existing.add(f); + changedFiles.push(f); + extraAddedCount++; + } + } + } + if (extraAddedCount > 0) { + logger.debug( + `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`, + ); + } else { + logger.debug( + "All diff range files were already present in the diff from the base database.", + ); + } + } + } catch (e) { + logger.debug( + `Failed while attempting to add diff range files in overlay: ${(e as any).message || e}`, + ); + } + logger.info( - `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`, + `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`, ); const changedFilesJson = JSON.stringify({ changes: changedFiles });