Mirror of https://github.com/github/codeql-action.git (synced 2025-12-09 09:18:15 +08:00)

Compare commits: mbg/csharp...alexet/dif (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5a8e345008 | |
| | f030ca35d0 | |
| | 6a32dd9374 | |
| | 4c0acfe29e | |
| | 9bb8375aed | |
| | 56b69c76aa | |
lib/analyze-action-post.js (generated, 36 changed lines)
@@ -117761,12 +117761,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path2.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path2.join(
@@ -118135,7 +118164,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
       return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
       const extraArgs = config.languages.map(
         (language) => `--language=${language}`
       );
@@ -118170,6 +118199,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
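The hunk above, which recurs in each generated bundle below, unions the files named by the PR diff ranges into the set of files whose git OIDs differ from the base database, so the --overlay-changes file also covers files touched by the pull request. A minimal standalone sketch of that union step, mirroring the bundled code (the helper name `mergePrDiffFiles` is invented for illustration):

```typescript
import * as fs from "fs";
import * as path from "path";

// Sketch of the union performed by the new writeOverlayChangesFile code path.
// `changedFiles` is what the OID comparison produced; `prDiffChangedFiles` is
// the optional set of files named by the PR diff ranges.
function mergePrDiffFiles(
  changedFiles: string[],
  prDiffChangedFiles: Set<string> | undefined,
  overlayFileOids: Record<string, string>,
  sourceRoot: string,
): { merged: string[]; extraAddedCount: number } {
  const merged = [...changedFiles];
  let extraAddedCount = 0;
  if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
    const existing = new Set(merged);
    for (const f of prDiffChangedFiles) {
      // Only add files that actually exist in the overlay checkout.
      if (
        !existing.has(f) &&
        (overlayFileOids[f] !== undefined || fs.existsSync(path.join(sourceRoot, f)))
      ) {
        existing.add(f);
        merged.push(f);
        extraAddedCount++;
      }
    }
  }
  return { merged, extraAddedCount };
}
```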
lib/analyze-action.js (generated, 232 changed lines)
@@ -90112,29 +90112,6 @@ var persistInputs = function() {
|
||||
);
|
||||
core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables));
|
||||
};
|
||||
function getPullRequestBranches() {
|
||||
const pullRequest = github.context.payload.pull_request;
|
||||
if (pullRequest) {
|
||||
return {
|
||||
base: pullRequest.base.ref,
|
||||
// We use the head label instead of the head ref here, because the head
|
||||
// ref lacks owner information and by itself does not uniquely identify
|
||||
// the head branch (which may be in a forked repository).
|
||||
head: pullRequest.head.label
|
||||
};
|
||||
}
|
||||
const codeScanningRef = process.env.CODE_SCANNING_REF;
|
||||
const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
|
||||
if (codeScanningRef && codeScanningBaseBranch) {
|
||||
return {
|
||||
base: codeScanningBaseBranch,
|
||||
// PR analysis under Default Setup analyzes the PR head commit instead of
|
||||
// the merge commit, so we can use the provided ref directly.
|
||||
head: codeScanningRef
|
||||
};
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
var qualityCategoryMapping = {
|
||||
"c#": "csharp",
|
||||
cpp: "c-cpp",
|
||||
@@ -90193,6 +90170,9 @@ var path16 = __toESM(require("path"));
|
||||
var import_perf_hooks2 = require("perf_hooks");
|
||||
var io5 = __toESM(require_io());
|
||||
|
||||
// src/autobuild.ts
|
||||
var core11 = __toESM(require_core());
|
||||
|
||||
// src/api-client.ts
|
||||
var core5 = __toESM(require_core());
|
||||
var githubUtils = __toESM(require_utils4());
|
||||
@@ -90362,9 +90342,6 @@ function wrapApiConfigurationError(e) {
|
||||
return e;
|
||||
}
|
||||
|
||||
// src/autobuild.ts
|
||||
var core11 = __toESM(require_core());
|
||||
|
||||
// src/codeql.ts
|
||||
var fs14 = __toESM(require("fs"));
|
||||
var path14 = __toESM(require("path"));
|
||||
@@ -90941,12 +90918,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
|
||||
function getBaseDatabaseOidsFilePath(config) {
|
||||
return path7.join(config.dbLocation, "base-database-oids.json");
|
||||
}
|
||||
async function writeOverlayChangesFile(config, sourceRoot, logger) {
|
||||
async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
|
||||
const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
|
||||
const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
|
||||
const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
|
||||
const originalCount = changedFiles.length;
|
||||
let extraAddedCount = 0;
|
||||
try {
|
||||
if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
|
||||
const existing = new Set(changedFiles);
|
||||
for (const f of prDiffChangedFiles) {
|
||||
if (!existing.has(f)) {
|
||||
if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) {
|
||||
existing.add(f);
|
||||
changedFiles.push(f);
|
||||
extraAddedCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (extraAddedCount > 0) {
|
||||
logger.debug(
|
||||
`Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
|
||||
);
|
||||
} else {
|
||||
logger.debug(
|
||||
"All diff range files were already present in the diff from the base database."
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.debug(
|
||||
`Failed while attempting to add diff range files in overlay: ${e.message || e}`
|
||||
);
|
||||
}
|
||||
logger.info(
|
||||
`Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
|
||||
`Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
|
||||
);
|
||||
const changedFilesJson = JSON.stringify({ changes: changedFiles });
|
||||
const overlayChangesFile = path7.join(
|
||||
@@ -91534,34 +91540,9 @@ var GitHubFeatureFlags = class {
|
||||
};
|
||||
|
||||
// src/diff-informed-analysis-utils.ts
|
||||
async function getDiffInformedAnalysisBranches(codeql, features, logger) {
|
||||
if (!await features.getValue("diff_informed_queries" /* DiffInformedQueries */, codeql)) {
|
||||
return void 0;
|
||||
}
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
if (gitHubVersion.type === 1 /* GHES */ && satisfiesGHESVersion(gitHubVersion.version, "<3.19", true)) {
|
||||
return void 0;
|
||||
}
|
||||
const branches = getPullRequestBranches();
|
||||
if (!branches) {
|
||||
logger.info(
|
||||
"Not performing diff-informed analysis because we are not analyzing a pull request."
|
||||
);
|
||||
}
|
||||
return branches;
|
||||
}
|
||||
function getDiffRangesJsonFilePath() {
|
||||
return path9.join(getTemporaryDirectory(), "pr-diff-range.json");
|
||||
}
|
||||
function writeDiffRangesJsonFile(logger, ranges) {
|
||||
const jsonContents = JSON.stringify(ranges, null, 2);
|
||||
const jsonFilePath = getDiffRangesJsonFilePath();
|
||||
fs8.writeFileSync(jsonFilePath, jsonContents);
|
||||
logger.debug(
|
||||
`Wrote pr-diff-range JSON file to ${jsonFilePath}:
|
||||
${jsonContents}`
|
||||
);
|
||||
}
|
||||
function readDiffRangesJsonFile(logger) {
|
||||
const jsonFilePath = getDiffRangesJsonFilePath();
|
||||
if (!fs8.existsSync(jsonFilePath)) {
|
||||
@@ -92832,7 +92813,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
async isScannedLanguage(language) {
|
||||
return !await this.isTracedLanguage(language);
|
||||
},
|
||||
async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
|
||||
async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
|
||||
const extraArgs = config.languages.map(
|
||||
(language) => `--language=${language}`
|
||||
);
|
||||
@@ -92867,6 +92848,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
const overlayChangesFile = await writeOverlayChangesFile(
|
||||
config,
|
||||
sourceRoot,
|
||||
prDiffChangedFiles,
|
||||
logger
|
||||
);
|
||||
extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
|
||||
@@ -93636,14 +93618,31 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
|
||||
trap_import_duration_ms: Math.round(trapImportTime)
|
||||
};
|
||||
}
|
||||
async function setupDiffInformedQueryRun(branches, logger) {
|
||||
async function setupDiffInformedQueryRun(logger) {
|
||||
return await withGroupAsync(
|
||||
"Generating diff range extension pack",
|
||||
async () => {
|
||||
let diffRanges;
|
||||
try {
|
||||
diffRanges = readDiffRangesJsonFile(logger);
|
||||
} catch (e) {
|
||||
logger.debug(
|
||||
`Failed to read precomputed diff ranges: ${getErrorMessage(e)}`
|
||||
);
|
||||
diffRanges = void 0;
|
||||
}
|
||||
if (diffRanges === void 0) {
|
||||
logger.info(
|
||||
"No precomputed diff ranges found; skipping diff-informed analysis stage."
|
||||
);
|
||||
return void 0;
|
||||
}
|
||||
const fileCount = new Set(
|
||||
diffRanges.filter((r) => r.path).map((r) => r.path)
|
||||
).size;
|
||||
logger.info(
|
||||
`Calculating diff ranges for ${branches.base}...${branches.head}`
|
||||
`Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).`
|
||||
);
|
||||
const diffRanges = await getPullRequestEditedDiffRanges(branches, logger);
|
||||
const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
|
||||
if (packDir === void 0) {
|
||||
logger.warning(
|
||||
@@ -93658,117 +93657,6 @@ async function setupDiffInformedQueryRun(branches, logger) {
|
||||
}
|
||||
);
|
||||
}
|
||||
async function getPullRequestEditedDiffRanges(branches, logger) {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
|
||||
if (fileDiffs === void 0) {
|
||||
return void 0;
|
||||
}
|
||||
if (fileDiffs.length >= 300) {
|
||||
logger.warning(
|
||||
`Cannot retrieve the full diff because there are too many (${fileDiffs.length}) changed files in the pull request.`
|
||||
);
|
||||
return void 0;
|
||||
}
|
||||
const results = [];
|
||||
for (const filediff of fileDiffs) {
|
||||
const diffRanges = getDiffRanges(filediff, logger);
|
||||
if (diffRanges === void 0) {
|
||||
return void 0;
|
||||
}
|
||||
results.push(...diffRanges);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
async function getFileDiffsWithBasehead(branches, logger) {
|
||||
const repositoryNwo = getRepositoryNwoFromEnv(
|
||||
"CODE_SCANNING_REPOSITORY",
|
||||
"GITHUB_REPOSITORY"
|
||||
);
|
||||
const basehead = `${branches.base}...${branches.head}`;
|
||||
try {
|
||||
const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
basehead,
|
||||
per_page: 1
|
||||
}
|
||||
);
|
||||
logger.debug(
|
||||
`Response from compareCommitsWithBasehead(${basehead}):
|
||||
${JSON.stringify(response, null, 2)}`
|
||||
);
|
||||
return response.data.files;
|
||||
} catch (error2) {
|
||||
if (error2.status) {
|
||||
logger.warning(`Error retrieving diff ${basehead}: ${error2.message}`);
|
||||
logger.debug(
|
||||
`Error running compareCommitsWithBasehead(${basehead}):
|
||||
Request: ${JSON.stringify(error2.request, null, 2)}
|
||||
Error Response: ${JSON.stringify(error2.response, null, 2)}`
|
||||
);
|
||||
return void 0;
|
||||
} else {
|
||||
throw error2;
|
||||
}
|
||||
}
|
||||
}
|
||||
function getDiffRanges(fileDiff, logger) {
|
||||
const filename = path16.join(getRequiredInput("checkout_path"), fileDiff.filename).replaceAll(path16.sep, "/");
|
||||
if (fileDiff.patch === void 0) {
|
||||
if (fileDiff.changes === 0) {
|
||||
return [];
|
||||
}
|
||||
return [
|
||||
{
|
||||
path: filename,
|
||||
startLine: 0,
|
||||
endLine: 0
|
||||
}
|
||||
];
|
||||
}
|
||||
let currentLine = 0;
|
||||
let additionRangeStartLine = void 0;
|
||||
const diffRanges = [];
|
||||
const diffLines = fileDiff.patch.split("\n");
|
||||
diffLines.push(" ");
|
||||
for (const diffLine of diffLines) {
|
||||
if (diffLine.startsWith("-")) {
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith("+")) {
|
||||
if (additionRangeStartLine === void 0) {
|
||||
additionRangeStartLine = currentLine;
|
||||
}
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
if (additionRangeStartLine !== void 0) {
|
||||
diffRanges.push({
|
||||
path: filename,
|
||||
startLine: additionRangeStartLine,
|
||||
endLine: currentLine - 1
|
||||
});
|
||||
additionRangeStartLine = void 0;
|
||||
}
|
||||
if (diffLine.startsWith("@@ ")) {
|
||||
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
|
||||
if (match === null) {
|
||||
logger.warning(
|
||||
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`
|
||||
);
|
||||
return void 0;
|
||||
}
|
||||
currentLine = parseInt(match[1], 10);
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith(" ")) {
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return diffRanges;
|
||||
}
|
||||
function writeDiffRangeDataExtensionPack(logger, ranges) {
|
||||
if (ranges === void 0) {
|
||||
return void 0;
|
||||
@@ -93817,7 +93705,6 @@ extensions:
|
||||
`Wrote pr-diff-range extension pack to ${extensionFilePath}:
|
||||
${extensionContents}`
|
||||
);
|
||||
writeDiffRangesJsonFile(logger, ranges);
|
||||
return diffRangeDir;
|
||||
}
|
||||
var defaultSuites = /* @__PURE__ */ new Set([
|
||||
@@ -96206,12 +96093,7 @@ async function run() {
|
||||
getOptionalInput("ram") || process.env["CODEQL_RAM"],
|
||||
logger
|
||||
);
|
||||
const branches = await getDiffInformedAnalysisBranches(
|
||||
codeql,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
const diffRangePackDir = branches ? await setupDiffInformedQueryRun(branches, logger) : void 0;
|
||||
const diffRangePackDir = await setupDiffInformedQueryRun(logger);
|
||||
await warnIfGoInstalledAfterInit(config, logger);
|
||||
await runAutobuildIfLegacyGoWorkflow(config, logger);
|
||||
dbCreationTimings = await runFinalize(
|
||||
|
||||
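The bundle above also shows the helpers that move diff ranges between steps: writeDiffRangesJsonFile stores them as pr-diff-range.json in the temporary directory and readDiffRangesJsonFile loads them back, and the rewritten setupDiffInformedQueryRun now consumes only the precomputed file, so the ranges are presumably written earlier in the workflow (the init bundle's diff is suppressed further down). A sketch of the handoff file's shape as implied by those helpers and by DiffThunkRange; the concrete paths and line numbers are invented for illustration:

```typescript
// Shape of the pr-diff-range.json handoff file, as implied by
// writeDiffRangesJsonFile / readDiffRangesJsonFile above. Values are examples.
interface DiffThunkRange {
  path: string; // absolute path, forward slashes
  startLine: number; // 1-based, inclusive; 0/0 is the "whole file" fallback
  endLine: number;
}

const exampleRanges: DiffThunkRange[] = [
  { path: "/checkout/path/src/app.ts", startLine: 53, endLine: 54 },
  { path: "/checkout/path/README.md", startLine: 0, endLine: 0 },
];

// Written as JSON to <temporary directory>/pr-diff-range.json, e.g.:
//   fs.writeFileSync(jsonFilePath, JSON.stringify(exampleRanges, null, 2));
```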
lib/autobuild-action.js (generated, 36 changed lines)
@@ -78501,12 +78501,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path2.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path2.join(
@@ -79170,7 +79199,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
       return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
       const extraArgs = config.languages.map(
         (language) => `--language=${language}`
       );
@@ -79205,6 +79234,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
lib/init-action-post.js (generated, 36 changed lines)
@@ -129185,12 +129185,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path7.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs6.existsSync(path7.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path7.join(
@@ -130781,7 +130810,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
       return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
      const extraArgs = config.languages.map(
        (language) => `--language=${language}`
      );
@@ -130816,6 +130845,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
lib/init-action.js (generated, 991 changed lines): file diff suppressed because it is too large.
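Because the init bundle's diff is suppressed, the place where `prDiffChangedFiles` is actually computed is not visible in this comparison. Based only on the parameter type added in src/codeql.ts (`Set<string> | undefined`) and the diff-range data shown elsewhere in this compare, one plausible derivation is the set of distinct paths in the precomputed ranges; the helper below is hypothetical and path normalization is glossed over:

```typescript
// Hypothetical sketch: derive the changed-file set passed to databaseInitCluster
// from precomputed diff ranges. Not shown in this comparison (the init-action
// diff is suppressed above).
function toPrDiffChangedFiles(
  ranges: Array<{ path: string; startLine: number; endLine: number }>,
  checkoutPath: string,
): Set<string> {
  const files = new Set<string>();
  for (const r of ranges) {
    // writeOverlayChangesFile checks paths relative to the source root,
    // so strip the checkout prefix if present.
    const rel = r.path.startsWith(checkoutPath + "/")
      ? r.path.slice(checkoutPath.length + 1)
      : r.path;
    files.add(rel);
  }
  return files;
}
```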
lib/resolve-environment-action.js (generated, 36 changed lines)
@@ -78494,12 +78494,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path2.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs2.existsSync(path2.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path2.join(
@@ -78870,7 +78899,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
       return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
       const extraArgs = config.languages.map(
         (language) => `--language=${language}`
       );
@@ -78905,6 +78934,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
lib/upload-lib.js (generated, 36 changed lines)
@@ -89289,12 +89289,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path7.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path7.join(
@@ -90615,7 +90644,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
       return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
       const extraArgs = config.languages.map(
         (language) => `--language=${language}`
       );
@@ -90650,6 +90679,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
lib/upload-sarif-action.js (generated, 36 changed lines)
@@ -89204,12 +89204,41 @@ async function readBaseDatabaseOidsFile(config, logger) {
 function getBaseDatabaseOidsFilePath(config) {
   return path7.join(config.dbLocation, "base-database-oids.json");
 }
-async function writeOverlayChangesFile(config, sourceRoot, logger) {
+async function writeOverlayChangesFile(config, sourceRoot, prDiffChangedFiles, logger) {
   const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
   const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
   const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);
+  const originalCount = changedFiles.length;
+  let extraAddedCount = 0;
+  try {
+    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
+      const existing = new Set(changedFiles);
+      for (const f of prDiffChangedFiles) {
+        if (!existing.has(f)) {
+          if (overlayFileOids[f] !== void 0 || fs5.existsSync(path7.join(sourceRoot, f))) {
+            existing.add(f);
+            changedFiles.push(f);
+            extraAddedCount++;
+          }
+        }
+      }
+      if (extraAddedCount > 0) {
+        logger.debug(
+          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`
+        );
+      } else {
+        logger.debug(
+          "All diff range files were already present in the diff from the base database."
+        );
+      }
+    }
+  } catch (e) {
+    logger.debug(
+      `Failed while attempting to add diff range files in overlay: ${e.message || e}`
+    );
+  }
   logger.info(
-    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`
+    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`
   );
   const changedFilesJson = JSON.stringify({ changes: changedFiles });
   const overlayChangesFile = path7.join(
@@ -91287,7 +91316,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
     async isScannedLanguage(language) {
      return !await this.isTracedLanguage(language);
     },
-    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, logger) {
+    async databaseInitCluster(config, sourceRoot, processName, qlconfigFile, prDiffChangedFiles, logger) {
       const extraArgs = config.languages.map(
         (language) => `--language=${language}`
       );
@@ -91322,6 +91351,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
@@ -30,7 +30,6 @@ import {
   DependencyCacheUploadStatusReport,
   uploadDependencyCaches,
 } from "./dependency-caching";
-import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { Feature, Features } from "./feature-flags";
 import { KnownLanguage } from "./languages";
@@ -299,14 +298,8 @@ async function run() {
       logger,
     );

-    const branches = await getDiffInformedAnalysisBranches(
-      codeql,
-      features,
-      logger,
-    );
-    const diffRangePackDir = branches
-      ? await setupDiffInformedQueryRun(branches, logger)
-      : undefined;
+    // Setup diff informed analysis if needed (based on whether init created the file)
+    const diffRangePackDir = await setupDiffInformedQueryRun(logger);

     await warnIfGoInstalledAfterInit(config, logger);
     await runAutobuildIfLegacyGoWorkflow(config, logger);
@@ -4,10 +4,8 @@ import * as path from "path";
|
||||
import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { CodeQuality, CodeScanning } from "./analyses";
|
||||
import {
|
||||
exportedForTesting,
|
||||
runQueries,
|
||||
defaultSuites,
|
||||
resolveQuerySuiteAlias,
|
||||
@@ -131,204 +129,6 @@ test("status report fields", async (t) => {
|
||||
});
|
||||
});
|
||||
|
||||
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("checkout_path")
|
||||
.returns("/checkout/path");
|
||||
return exportedForTesting.getDiffRanges(
|
||||
{
|
||||
filename: "test.txt",
|
||||
changes,
|
||||
patch: patch?.join("\n"),
|
||||
},
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
}
|
||||
|
||||
test("getDiffRanges: file unchanged", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(0, undefined);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: file diff too large", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(1000000, undefined);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 0,
|
||||
endLine: 0,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single addition range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,8 +50,6 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"-2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single update range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,9 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
" c",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 55,
|
||||
endLine: 55,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
" d",
|
||||
"-2",
|
||||
"+3",
|
||||
" e",
|
||||
" f",
|
||||
"+4",
|
||||
"+5",
|
||||
" g",
|
||||
" h",
|
||||
" i",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 54,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 57,
|
||||
endLine: 58,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: multiple diff thunks", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
"@@ -130,6 +150,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 153,
|
||||
endLine: 154,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: no diff context lines", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 50,
|
||||
endLine: 51,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: malformed thunk header", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, undefined);
|
||||
});
|
||||
|
||||
test("resolveQuerySuiteAlias", (t) => {
|
||||
// default query suite names should resolve to something language-specific ending in `.qls`.
|
||||
for (const suite of defaultSuites) {
|
||||
|
||||
src/analyze.ts (225 changed lines)
@@ -6,13 +6,9 @@ import * as io from "@actions/io";
 import * as del from "del";
 import * as yaml from "js-yaml";

-import {
-  getRequiredInput,
-  getTemporaryDirectory,
-  PullRequestBranches,
-} from "./actions-util";
+import { getTemporaryDirectory } from "./actions-util";
 import * as analyses from "./analyses";
-import { getApiClient } from "./api-client";
+// (getApiClient import removed; no longer needed after diff refactor)
 import { setupCppAutobuild } from "./autobuild";
 import { type CodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
@@ -20,14 +16,14 @@ import { getJavaTempDependencyDir } from "./dependency-caching";
 import { addDiagnostic, makeDiagnostic } from "./diagnostics";
 import {
   DiffThunkRange,
-  writeDiffRangesJsonFile,
+  readDiffRangesJsonFile,
 } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { FeatureEnablement, Feature } from "./feature-flags";
 import { KnownLanguage, Language } from "./languages";
 import { Logger, withGroupAsync } from "./logging";
 import { OverlayDatabaseMode } from "./overlay-database-utils";
-import { getRepositoryNwoFromEnv } from "./repository";
+// getRepositoryNwoFromEnv no longer needed after extracting diff logic
 import { DatabaseCreationTimings, EventReport } from "./status-report";
 import { endTracingForCluster } from "./tracer-config";
 import * as util from "./util";
@@ -287,16 +283,36 @@ async function finalizeDatabaseCreation(
  * the diff range information, or `undefined` if the feature is disabled.
  */
 export async function setupDiffInformedQueryRun(
-  branches: PullRequestBranches,
   logger: Logger,
 ): Promise<string | undefined> {
   return await withGroupAsync(
     "Generating diff range extension pack",
     async () => {
+      // Only use precomputed diff ranges; never recompute here.
+      let diffRanges: DiffThunkRange[] | undefined;
+      try {
+        diffRanges = readDiffRangesJsonFile(logger);
+      } catch (e) {
+        logger.debug(
+          `Failed to read precomputed diff ranges: ${util.getErrorMessage(e)}`,
+        );
+        diffRanges = undefined;
+      }
+
+      if (diffRanges === undefined) {
+        logger.info(
+          "No precomputed diff ranges found; skipping diff-informed analysis stage.",
+        );
+        return undefined;
+      }
+
+      const fileCount = new Set(
+        diffRanges.filter((r) => r.path).map((r) => r.path),
+      ).size;
       logger.info(
-        `Calculating diff ranges for ${branches.base}...${branches.head}`,
+        `Using precomputed diff ranges (${diffRanges.length} ranges across ${fileCount} files).`,
       );
-      const diffRanges = await getPullRequestEditedDiffRanges(branches, logger);
+
       const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
       if (packDir === undefined) {
         logger.warning(
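The rewritten setupDiffInformedQueryRun consumes only the ranges that were precomputed and stored in pr-diff-range.json; the fileCount in the new log line is simply the number of distinct paths among those ranges. A small worked example with invented values:

```typescript
import { strict as assert } from "assert";

// Illustrative values; the objects mirror the DiffThunkRange shape used above.
const diffRanges = [
  { path: "/checkout/path/src/a.ts", startLine: 53, endLine: 54 },
  { path: "/checkout/path/src/a.ts", startLine: 153, endLine: 154 },
  { path: "/checkout/path/src/b.ts", startLine: 0, endLine: 0 },
];

// Same expression as in setupDiffInformedQueryRun above.
const fileCount = new Set(
  diffRanges.filter((r) => r.path).map((r) => r.path),
).size;

assert.equal(fileCount, 2);
// The log line would then read:
// "Using precomputed diff ranges (3 ranges across 2 files)."
```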
@@ -313,185 +329,6 @@ export async function setupDiffInformedQueryRun(
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the file line ranges that were added or modified in the pull request.
|
||||
*
|
||||
* @param branches The base and head branches of the pull request.
|
||||
* @param logger
|
||||
* @returns An array of tuples, where each tuple contains the absolute path of a
|
||||
* file, the start line and the end line (both 1-based and inclusive) of an
|
||||
* added or modified range in that file. Returns `undefined` if the action was
|
||||
* not triggered by a pull request or if there was an error.
|
||||
*/
|
||||
async function getPullRequestEditedDiffRanges(
|
||||
branches: PullRequestBranches,
|
||||
logger: Logger,
|
||||
): Promise<DiffThunkRange[] | undefined> {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
|
||||
if (fileDiffs === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (fileDiffs.length >= 300) {
|
||||
// The "compare two commits" API returns a maximum of 300 changed files. If
|
||||
// we see that many changed files, it is possible that there could be more,
|
||||
// with the rest being truncated. In this case, we should not attempt to
|
||||
// compute the diff ranges, as the result would be incomplete.
|
||||
logger.warning(
|
||||
`Cannot retrieve the full diff because there are too many ` +
|
||||
`(${fileDiffs.length}) changed files in the pull request.`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
const results: DiffThunkRange[] = [];
|
||||
for (const filediff of fileDiffs) {
|
||||
const diffRanges = getDiffRanges(filediff, logger);
|
||||
if (diffRanges === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
results.push(...diffRanges);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* This interface is an abbreviated version of the file diff object returned by
|
||||
* the GitHub API.
|
||||
*/
|
||||
interface FileDiff {
|
||||
filename: string;
|
||||
changes: number;
|
||||
// A patch may be absent if the file is binary, if the file diff is too large,
|
||||
// or if the file is unchanged.
|
||||
patch?: string | undefined;
|
||||
}
|
||||
|
||||
async function getFileDiffsWithBasehead(
|
||||
branches: PullRequestBranches,
|
||||
logger: Logger,
|
||||
): Promise<FileDiff[] | undefined> {
|
||||
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
|
||||
// to GITHUB_REPOSITORY.
|
||||
const repositoryNwo = getRepositoryNwoFromEnv(
|
||||
"CODE_SCANNING_REPOSITORY",
|
||||
"GITHUB_REPOSITORY",
|
||||
);
|
||||
const basehead = `${branches.base}...${branches.head}`;
|
||||
try {
|
||||
const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
basehead,
|
||||
per_page: 1,
|
||||
},
|
||||
);
|
||||
logger.debug(
|
||||
`Response from compareCommitsWithBasehead(${basehead}):` +
|
||||
`\n${JSON.stringify(response, null, 2)}`,
|
||||
);
|
||||
return response.data.files;
|
||||
} catch (error: any) {
|
||||
if (error.status) {
|
||||
logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
|
||||
logger.debug(
|
||||
`Error running compareCommitsWithBasehead(${basehead}):` +
|
||||
`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
|
||||
`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
|
||||
);
|
||||
return undefined;
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getDiffRanges(
|
||||
fileDiff: FileDiff,
|
||||
logger: Logger,
|
||||
): DiffThunkRange[] | undefined {
|
||||
// Diff-informed queries expect the file path to be absolute. CodeQL always
|
||||
// uses forward slashes as the path separator, so on Windows we need to
|
||||
// replace any backslashes with forward slashes.
|
||||
const filename = path
|
||||
.join(getRequiredInput("checkout_path"), fileDiff.filename)
|
||||
.replaceAll(path.sep, "/");
|
||||
|
||||
if (fileDiff.patch === undefined) {
|
||||
if (fileDiff.changes === 0) {
|
||||
// There are situations where a changed file legitimately has no diff.
|
||||
// For example, the file may be a binary file, or that the file may have
|
||||
// been renamed with no changes to its contents. In these cases, the
|
||||
// file would be reported as having 0 changes, and we can return an empty
|
||||
// array to indicate no diff range in this file.
|
||||
return [];
|
||||
}
|
||||
// If a file is reported to have nonzero changes but no patch, that may be
|
||||
// due to the file diff being too large. In this case, we should fall back
|
||||
// to a special diff range that covers the entire file.
|
||||
return [
|
||||
{
|
||||
path: filename,
|
||||
startLine: 0,
|
||||
endLine: 0,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
// The 1-based file line number of the current line
|
||||
let currentLine = 0;
|
||||
// The 1-based file line number that starts the current range of added lines
|
||||
let additionRangeStartLine: number | undefined = undefined;
|
||||
const diffRanges: DiffThunkRange[] = [];
|
||||
|
||||
const diffLines = fileDiff.patch.split("\n");
|
||||
// Adding a fake context line at the end ensures that the following loop will
|
||||
// always terminate the last range of added lines.
|
||||
diffLines.push(" ");
|
||||
|
||||
for (const diffLine of diffLines) {
|
||||
if (diffLine.startsWith("-")) {
|
||||
// Ignore deletions completely -- we do not even want to consider them when
|
||||
// calculating consecutive ranges of added lines.
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith("+")) {
|
||||
if (additionRangeStartLine === undefined) {
|
||||
additionRangeStartLine = currentLine;
|
||||
}
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
if (additionRangeStartLine !== undefined) {
|
||||
// Any line that does not start with a "+" or "-" terminates the current
|
||||
// range of added lines.
|
||||
diffRanges.push({
|
||||
path: filename,
|
||||
startLine: additionRangeStartLine,
|
||||
endLine: currentLine - 1,
|
||||
});
|
||||
additionRangeStartLine = undefined;
|
||||
}
|
||||
if (diffLine.startsWith("@@ ")) {
|
||||
// A new hunk header line resets the current line number.
|
||||
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
|
||||
if (match === null) {
|
||||
logger.warning(
|
||||
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
currentLine = parseInt(match[1], 10);
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith(" ")) {
|
||||
// An unchanged context line advances the current line number.
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return diffRanges;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an extension pack in the temporary directory that contains the file
|
||||
* line ranges that were added or modified in the pull request.
|
||||
@@ -572,10 +409,6 @@ extensions:
|
||||
`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`,
|
||||
);
|
||||
|
||||
// Write the diff ranges to a JSON file, for action-side alert filtering by the
|
||||
// upload-lib module.
|
||||
writeDiffRangesJsonFile(logger, ranges);
|
||||
|
||||
return diffRangeDir;
|
||||
}
|
||||
|
||||
@@ -923,6 +756,4 @@ export async function warnIfGoInstalledAfterInit(
   }
 }

-export const exportedForTesting = {
-  getDiffRanges,
-};
+export const exportedForTesting = {};
@@ -517,6 +517,7 @@ const injectedConfigMacro = test.macro({
       "",
       undefined,
       undefined,
+      undefined,
       getRunnerLogger(true),
     );

@@ -803,6 +804,7 @@ test("passes a code scanning config AND qlconfig to the CLI", async (t: Executio
       "",
       undefined,
       "/path/to/qlconfig.yml",
+      undefined,
       getRunnerLogger(true),
     );

@@ -831,6 +833,7 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
       "",
       undefined,
       undefined, // undefined qlconfigFile
+      undefined,
       getRunnerLogger(true),
     );

@@ -1080,6 +1083,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP
       "sourceRoot",
       undefined,
       undefined,
+      undefined,
       getRunnerLogger(false),
     );

@@ -96,6 +96,7 @@ export interface CodeQL {
     sourceRoot: string,
     processName: string | undefined,
     qlconfigFile: string | undefined,
+    prDiffChangedFiles: Set<string> | undefined,
     logger: Logger,
   ): Promise<void>;
   /**
@@ -560,6 +561,7 @@ export async function getCodeQLForCmd(
     sourceRoot: string,
     processName: string | undefined,
     qlconfigFile: string | undefined,
+    prDiffChangedFiles: Set<string> | undefined,
     logger: Logger,
   ) {
     const extraArgs = config.languages.map(
@@ -602,6 +604,7 @@ export async function getCodeQLForCmd(
       const overlayChangesFile = await writeOverlayChangesFile(
         config,
         sourceRoot,
+        prDiffChangedFiles,
         logger,
       );
       extraArgs.push(`--overlay-changes=${overlayChangesFile}`);
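These signature changes mean every caller of databaseInitCluster now threads the optional changed-file set through, as the updated tests above illustrate by passing an extra `undefined`. A hedged sketch of a call site, taking only the parameter order from the interface above (the `Config` type is stood in for by `any` to keep the sketch self-contained):

```typescript
import type { CodeQL } from "./codeql";
import type { Logger } from "./logging";

// Sketch of a call site after the signature change. Passing `undefined` for
// prDiffChangedFiles preserves the previous behaviour, since the overlay code
// only acts on a non-empty set.
async function initClusterWithPrDiff(
  codeql: CodeQL,
  config: any,
  sourceRoot: string,
  qlconfigFile: string | undefined,
  prDiffChangedFiles: Set<string> | undefined,
  logger: Logger,
): Promise<void> {
  await codeql.databaseInitCluster(
    config,
    sourceRoot,
    undefined, // processName
    qlconfigFile,
    prDiffChangedFiles, // new parameter: files named by the PR diff ranges
    logger,
  );
}
```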
@@ -4,7 +4,10 @@ import * as sinon from "sinon";
 import * as actionsUtil from "./actions-util";
 import type { PullRequestBranches } from "./actions-util";
 import * as apiClient from "./api-client";
-import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
+import {
+  shouldPerformDiffInformedAnalysis,
+  exportedForTesting as diffExportedForTesting,
+} from "./diff-informed-analysis-utils";
 import { Feature, Features } from "./feature-flags";
 import { getRunnerLogger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
@@ -183,3 +186,204 @@ test(
|
||||
},
|
||||
false,
|
||||
);
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests for getDiffRanges (moved from analyze.test.ts after extraction)
|
||||
// ---------------------------------------------------------------------------
|
||||
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("checkout_path")
|
||||
.returns("/checkout/path");
|
||||
return diffExportedForTesting.getDiffRanges(
|
||||
{
|
||||
filename: "test.txt",
|
||||
changes,
|
||||
patch: patch?.join("\n"),
|
||||
},
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
}
|
||||
|
||||
test("getDiffRanges: file unchanged", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(0, undefined);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: file diff too large", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(1000000, undefined);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 0,
|
||||
endLine: 0,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single addition range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,8 +50,6 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"-2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single update range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,9 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
" c",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 55,
|
||||
endLine: 55,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
" d",
|
||||
"-2",
|
||||
"+3",
|
||||
" e",
|
||||
" f",
|
||||
"+4",
|
||||
"+5",
|
||||
" g",
|
||||
" h",
|
||||
" i",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 54,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 57,
|
||||
endLine: 58,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: multiple diff thunks", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
"@@ -130,6 +150,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 153,
|
||||
endLine: 154,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: no diff context lines", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 50,
|
||||
endLine: 51,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: malformed thunk header", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, undefined);
|
||||
});
|
||||
|
||||
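The tests above pin down the line arithmetic inside getDiffRanges: the hunk header supplies the starting line on the new side, context lines advance it, and each run of '+' lines becomes one range. Worked through for the single-addition case used in the first test, with checkout_path stubbed to /checkout/path:

```typescript
// Worked example of the getDiffRanges arithmetic for the patch
//   "@@ -30,6 +50,8 @@"  ->  currentLine starts at 50
//   " a", " b", " c"     ->  three context lines advance it to 53
//   "+1", "+2"           ->  an addition range starts at 53; currentLine becomes 55
//   " d"                 ->  a context line closes the range at currentLine - 1 = 54
// With checkout_path stubbed to "/checkout/path", the expected result is:
const expected = [
  { path: "/checkout/path/test.txt", startLine: 53, endLine: 54 },
];
```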
@@ -1,14 +1,28 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import type { PullRequestBranches } from "./actions-util";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { getRequiredInput } from "./actions-util";
|
||||
import { getGitHubVersion, getApiClient } from "./api-client";
|
||||
import type { CodeQL } from "./codeql";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
import { getRepositoryNwoFromEnv } from "./repository";
|
||||
import { GitHubVariant, satisfiesGHESVersion } from "./util";
|
||||
|
||||
/**
|
||||
* This interface is an abbreviated version of the file diff object returned by
|
||||
* the GitHub API. (Kept internal to this module.)
|
||||
*/
|
||||
interface FileDiff {
|
||||
filename: string;
|
||||
changes: number;
|
||||
// A patch may be absent if the file is binary, if the file diff is too large,
|
||||
// or if the file is unchanged.
|
||||
patch?: string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the action should perform diff-informed analysis.
|
||||
*/
|
||||
@@ -93,3 +107,176 @@ export function readDiffRangesJsonFile(
|
||||
);
|
||||
return JSON.parse(jsonContents) as DiffThunkRange[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the file line ranges that were added or modified in the pull request.
|
||||
*
|
||||
* @param branches The base and head branches of the pull request.
|
||||
* @param logger
|
||||
* @returns An array of objects, where each object contains the absolute path of a
|
||||
* file, the start line and the end line (both 1-based and inclusive) of an
|
||||
* added or modified range in that file. Returns `undefined` if the action was
|
||||
* not triggered by a pull request or if there was an error (including API
|
||||
* truncation conditions).
|
||||
*/
|
||||
export async function getPullRequestEditedDiffRanges(
|
||||
branches: PullRequestBranches,
|
||||
logger: Logger,
|
||||
): Promise<DiffThunkRange[] | undefined> {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
|
||||
if (fileDiffs === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (fileDiffs.length >= 300) {
|
||||
// The "compare two commits" API returns a maximum of 300 changed files. If
|
||||
// we see that many changed files, it is possible that there could be more,
|
||||
// with the rest being truncated. In this case, we should not attempt to
|
||||
// compute the diff ranges, as the result would be incomplete.
|
||||
logger.warning(
|
||||
`Cannot retrieve the full diff because there are too many ` +
|
||||
`(${fileDiffs.length}) changed files in the pull request.`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
const results: DiffThunkRange[] = [];
|
||||
for (const filediff of fileDiffs) {
|
||||
const diffRanges = getDiffRanges(filediff, logger);
|
||||
if (diffRanges === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
results.push(...diffRanges);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
async function getFileDiffsWithBasehead(
  branches: PullRequestBranches,
  logger: Logger,
): Promise<FileDiff[] | undefined> {
  // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
  // to GITHUB_REPOSITORY.
  const repositoryNwo = getRepositoryNwoFromEnv(
    "CODE_SCANNING_REPOSITORY",
    "GITHUB_REPOSITORY",
  );
  const basehead = `${branches.base}...${branches.head}`;
  try {
    const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
      {
        owner: repositoryNwo.owner,
        repo: repositoryNwo.repo,
        basehead,
        per_page: 1,
      },
    );
    logger.debug(
      `Response from compareCommitsWithBasehead(${basehead}):` +
        `\n${JSON.stringify(response, null, 2)}`,
    );
    return response.data.files;
  } catch (error: any) {
    if (error.status) {
      logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
      logger.debug(
        `Error running compareCommitsWithBasehead(${basehead}):` +
          `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
          `\nError Response: ${JSON.stringify(error.response, null, 2)}`,
      );
      return undefined;
    } else {
      throw error;
    }
  }
}

function getDiffRanges(
  fileDiff: FileDiff,
  logger: Logger,
): DiffThunkRange[] | undefined {
  // Diff-informed queries expect the file path to be absolute. CodeQL always
  // uses forward slashes as the path separator, so on Windows we need to
  // replace any backslashes with forward slashes.
  const filename = path
    .join(getRequiredInput("checkout_path"), fileDiff.filename)
    .replaceAll(path.sep, "/");

  if (fileDiff.patch === undefined) {
    if (fileDiff.changes === 0) {
      // There are situations where a changed file legitimately has no diff.
      // For example, the file may be a binary file, or the file may have
      // been renamed with no changes to its contents. In these cases, the
      // file would be reported as having 0 changes, and we can return an empty
      // array to indicate no diff range in this file.
      return [];
    }
    // If a file is reported to have nonzero changes but no patch, that may be
    // due to the file diff being too large. In this case, we should fall back
    // to a special diff range that covers the entire file.
    return [
      {
        path: filename,
        startLine: 0,
        endLine: 0,
      },
    ];
  }

  // The 1-based file line number of the current line
  let currentLine = 0;
  // The 1-based file line number that starts the current range of added lines
  let additionRangeStartLine: number | undefined = undefined;
  const diffRanges: DiffThunkRange[] = [];

  const diffLines = fileDiff.patch.split("\n");
  // Adding a fake context line at the end ensures that the following loop will
  // always terminate the last range of added lines.
  diffLines.push(" ");

  for (const diffLine of diffLines) {
    if (diffLine.startsWith("-")) {
      // Ignore deletions completely -- we do not even want to consider them when
      // calculating consecutive ranges of added lines.
      continue;
    }
    if (diffLine.startsWith("+")) {
      if (additionRangeStartLine === undefined) {
        additionRangeStartLine = currentLine;
      }
      currentLine++;
      continue;
    }
    if (additionRangeStartLine !== undefined) {
      // Any line that does not start with a "+" or "-" terminates the current
      // range of added lines.
      diffRanges.push({
        path: filename,
        startLine: additionRangeStartLine,
        endLine: currentLine - 1,
      });
      additionRangeStartLine = undefined;
    }
    if (diffLine.startsWith("@@ ")) {
      // A new hunk header line resets the current line number.
      const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
      if (match === null) {
        logger.warning(
          `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
        );
        return undefined;
      }
      currentLine = parseInt(match[1], 10);
      continue;
    }
    if (diffLine.startsWith(" ")) {
      // An unchanged context line advances the current line number.
      currentLine++;
      continue;
    }
  }
  return diffRanges;
}

// Export internal helpers for unit testing only (kept stable for existing tests)
export const exportedForTesting = {
  getDiffRanges,
};

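// For illustration only: a hedged sketch of how the exported helper might be
// exercised in a test. It assumes that `getRequiredInput("checkout_path")`
// reads the INPUT_CHECKOUT_PATH environment variable (as @actions/core does)
// and that `getRunnerLogger` is available from "./logging"; the patch text and
// expected ranges below are hypothetical.
//
//   import { getRunnerLogger } from "./logging";
//   import { exportedForTesting } from "./diff-informed-analysis-utils";
//
//   process.env["INPUT_CHECKOUT_PATH"] = "/checkout";
//   const ranges = exportedForTesting.getDiffRanges(
//     {
//       filename: "src/app.ts",
//       changes: 3,
//       patch: [
//         "@@ -10,3 +10,6 @@",
//         " context",
//         "+added line 11",
//         "+added line 12",
//         " context",
//         "+added line 14",
//         " context",
//       ].join("\n"),
//     },
//     getRunnerLogger(true),
//   );
//   // Expected: [{ path: "/checkout/src/app.ts", startLine: 11, endLine: 12 },
//   //            { path: "/checkout/src/app.ts", startLine: 14, endLine: 14 }]
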
@@ -33,6 +33,11 @@ import {
  logUnwrittenDiagnostics,
  makeDiagnostic,
} from "./diagnostics";
import {
  getPullRequestEditedDiffRanges,
  writeDiffRangesJsonFile,
  getDiffInformedAnalysisBranches,
} from "./diff-informed-analysis-utils";
import { EnvVar } from "./environment";
import { Feature, Features } from "./feature-flags";
import { loadPropertiesFromApi } from "./feature-flags/properties";
@@ -45,7 +50,7 @@ import {
  runDatabaseInitCluster,
} from "./init";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { getActionsLogger, Logger, withGroupAsync } from "./logging";
import {
  downloadOverlayBaseDatabaseFromCache,
  OverlayBaseDatabaseDownloadStats,
@@ -175,6 +180,7 @@ async function run() {
  persistInputs();

  let config: configUtils.Config | undefined;
  let prDiffChangedFiles: Set<string> | undefined;
  let codeql: CodeQL;
  let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined;
  let toolsFeatureFlagsValid: boolean | undefined;
@@ -336,6 +342,12 @@ async function run() {
    });

    await checkInstallPython311(config.languages, codeql);

    prDiffChangedFiles = await computeAndPersistDiffRanges(
      codeql,
      features,
      logger,
    );
  } catch (unwrappedError) {
    const error = wrapError(unwrappedError);
    core.setFailed(error.message);
@@ -662,6 +674,7 @@ async function run() {
      sourceRoot,
      "Runner.Worker.exe",
      qlconfigFile,
      prDiffChangedFiles,
      logger,
    );

@@ -691,6 +704,7 @@ async function run() {
        sourceRoot,
        "Runner.Worker.exe",
        qlconfigFile,
        prDiffChangedFiles,
        logger,
      );
    }
@@ -748,6 +762,45 @@ async function run() {
  );
}

/**
 * Compute and persist diff ranges early during init when diff-informed analysis
 * is enabled (feature flag + PR context). This writes the standard pr-diff-range.json
 * file for later reuse in the analyze step. Failures are logged but non-fatal.
 */
async function computeAndPersistDiffRanges(
  codeql: CodeQL,
  features: Features,
  logger: Logger,
): Promise<Set<string> | undefined> {
  try {
    return await withGroupAsync("Compute PR diff ranges", async () => {
      const branches = await getDiffInformedAnalysisBranches(
        codeql,
        features,
        logger,
      );
      if (!branches) {
        return undefined;
      }
      const ranges = await getPullRequestEditedDiffRanges(branches, logger);
      if (ranges === undefined) {
        return undefined;
      }
      writeDiffRangesJsonFile(logger, ranges);
      const distinctFiles = new Set(ranges.map((r) => r.path));
      logger.info(
        `Persisted ${ranges.length} diff range(s) across ${distinctFiles.size} file(s) for reuse during analyze step.`,
      );
      return distinctFiles;
    });
  } catch (e) {
    logger.warning(
      `Failed to compute and persist PR diff ranges early: ${getErrorMessage(e)}`,
    );
    return undefined;
  }
}

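// For illustration only: the Set returned by computeAndPersistDiffRanges above
// contains each edited file once, even when a file has several edited ranges
// (paths are hypothetical):
//
//   const ranges: DiffThunkRange[] = [
//     { path: "/checkout/src/a.ts", startLine: 3, endLine: 5 },
//     { path: "/checkout/src/a.ts", startLine: 40, endLine: 41 },
//     { path: "/checkout/src/b.ts", startLine: 1, endLine: 1 },
//   ];
//   new Set(ranges.map((r) => r.path));
//   // => a Set containing "/checkout/src/a.ts" and "/checkout/src/b.ts" (size 2)
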
function getTrapCachingEnabled(): boolean {
  // If the workflow specified something always respect that
  const trapCaching = getOptionalInput("trap-caching");

@@ -73,6 +73,7 @@ export async function runDatabaseInitCluster(
  sourceRoot: string,
  processName: string | undefined,
  qlconfigFile: string | undefined,
  prDiffChangedFiles: Set<string> | undefined,
  logger: Logger,
): Promise<void> {
  fs.mkdirSync(config.dbLocation, { recursive: true });
@@ -84,6 +85,7 @@ export async function runDatabaseInitCluster(
      sourceRoot,
      processName,
      qlconfigFile,
      prDiffChangedFiles,
      logger,
    ),
  );

@@ -69,6 +69,7 @@ test("writeOverlayChangesFile generates correct changes file", async (t) => {
    const changesFilePath = await writeOverlayChangesFile(
      config,
      sourceRoot,
      new Set([]), // The PR didn't touch any files
      logger,
    );
    getFileOidsStubForOverlay.restore();

@@ -116,13 +116,51 @@ function getBaseDatabaseOidsFilePath(config: Config): string {
export async function writeOverlayChangesFile(
  config: Config,
  sourceRoot: string,
  prDiffChangedFiles: Set<string> | undefined,
  logger: Logger,
): Promise<string> {
  const baseFileOids = await readBaseDatabaseOidsFile(config, logger);
  const overlayFileOids = await getFileOidsUnderPath(sourceRoot);
  const changedFiles = computeChangedFiles(baseFileOids, overlayFileOids);

  // Augment changed files with any files that appear in the precomputed PR diff ranges.
  // This ensures overlay analysis always includes every file with at least one edited range.
  const originalCount = changedFiles.length;
  let extraAddedCount = 0;
  try {
    if (prDiffChangedFiles && prDiffChangedFiles.size > 0) {
      const existing = new Set(changedFiles);
      for (const f of prDiffChangedFiles) {
        if (!existing.has(f)) {
          // Only include if file still exists (added/modified) -- skip deleted
          // files that might appear in diff.
          if (
            overlayFileOids[f] !== undefined ||
            fs.existsSync(path.join(sourceRoot, f))
          ) {
            existing.add(f);
            changedFiles.push(f);
            extraAddedCount++;
          }
        }
      }
      if (extraAddedCount > 0) {
        logger.debug(
          `Added ${extraAddedCount} file(s) from PR diff ranges into overlay: ${changedFiles.slice(-extraAddedCount).join(", ")}`,
        );
      } else {
        logger.debug(
          "All diff range files were already present in the diff from the base database.",
        );
      }
    }
  } catch (e) {
    logger.debug(
      `Failed while attempting to add diff range files in overlay: ${(e as any).message || e}`,
    );
  }

  logger.info(
    `Found ${changedFiles.length} changed file(s) under ${sourceRoot}.`,
    `Found ${originalCount} natural changed file(s); added from diff ${extraAddedCount}; total ${changedFiles.length} under ${sourceRoot}.`,
  );

  const changedFilesJson = JSON.stringify({ changes: changedFiles });

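// For illustration only: with two files changed on disk relative to the base
// database and one extra file pulled in from the PR diff ranges, the resulting
// changes file would contain JSON of this shape (the paths are hypothetical):
//
//   { "changes": ["src/a.ts", "src/b.ts", "docs/readme.md"] }
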