Mirror of https://github.com/github/codeql-action.git (synced 2026-01-04 21:50:17 +08:00)
Merge branch 'main' into NlightNFotis/detect_use_proxy_when_streaming
@@ -1,165 +1,12 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { computeAutomationID } from "./api-client";
|
||||
import { EnvVar } from "./environment";
|
||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||
import { initializeEnvironment, withTmpDir } from "./util";
|
||||
import { setupTests } from "./testing-utils";
|
||||
import { initializeEnvironment } from "./util";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
test("getRef() throws on the empty string", async (t) => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
await t.throwsAsync(actionsUtil.getRef);
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||
callback.withArgs("HEAD").resolves(currentSha);
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
const sha = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||
callback.withArgs("HEAD").resolves(sha);
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
|
||||
// These values are ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/HEAD";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = expectedRef;
|
||||
process.env["GITHUB_REF"] = "";
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const actualRef = await actionsUtil.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
|
||||
await t.throwsAsync(
|
||||
async () => {
|
||||
await actionsUtil.getRef();
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
},
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
|
||||
await t.throwsAsync(
|
||||
async () => {
|
||||
await actionsUtil.getRef();
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
},
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("computeAutomationID()", async (t) => {
|
||||
let actualAutomationID = computeAutomationID(
|
||||
".github/workflows/codeql-analysis.yml:analyze",
|
||||
@@ -215,150 +62,3 @@ test("initializeEnvironment", (t) => {
|
||||
initializeEnvironment("1.2.3");
|
||||
t.deepEqual(process.env[EnvVar.VERSION], "1.2.3");
|
||||
});
|
||||
|
||||
test("isAnalyzingDefaultBranch()", async (t) => {
|
||||
process.env["GITHUB_EVENT_NAME"] = "push";
|
||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(
|
||||
envFile,
|
||||
JSON.stringify({
|
||||
repository: {
|
||||
default_branch: "main",
|
||||
},
|
||||
}),
|
||||
);
|
||||
process.env["GITHUB_EVENT_PATH"] = envFile;
|
||||
|
||||
process.env["GITHUB_REF"] = "main";
|
||||
process.env["GITHUB_SHA"] = "1234";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
process.env["GITHUB_REF"] = "feature";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
|
||||
|
||||
fs.writeFileSync(
|
||||
envFile,
|
||||
JSON.stringify({
|
||||
schedule: "0 0 * * *",
|
||||
}),
|
||||
);
|
||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub
|
||||
.withArgs("ref")
|
||||
.resolves("refs/heads/something-else");
|
||||
getAdditionalInputStub
|
||||
.withArgs("sha")
|
||||
.resolves("0000000000000000000000000000000000000000");
|
||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "hucairz";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
const result = await actionsUtil.determineBaseBranchHeadCommitOid(__dirname);
|
||||
t.deepEqual(result, undefined);
|
||||
t.deepEqual(0, infoStub.callCount);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid not git repository", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
await actionsUtil.determineBaseBranchHeadCommitOid(tmpDir);
|
||||
});
|
||||
|
||||
t.deepEqual(1, infoStub.callCount);
|
||||
t.deepEqual(
|
||||
infoStub.firstCall.args[0],
|
||||
"git call failed. Will calculate the base branch SHA on the server. Error: " +
|
||||
"The checkout path provided to the action does not appear to be a git repository.",
|
||||
);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid other error", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
const result = await actionsUtil.determineBaseBranchHeadCommitOid(
|
||||
path.join(__dirname, "../../i-dont-exist"),
|
||||
);
|
||||
t.deepEqual(result, undefined);
|
||||
t.deepEqual(1, infoStub.callCount);
|
||||
t.assert(
|
||||
infoStub.firstCall.args[0].startsWith(
|
||||
"git call failed. Will calculate the base branch SHA on the server. Error: ",
|
||||
),
|
||||
);
|
||||
t.assert(
|
||||
!infoStub.firstCall.args[0].endsWith(
|
||||
"The checkout path provided to the action does not appear to be a git repository.",
|
||||
),
|
||||
);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("decodeGitFilePath unquoted strings", async (t) => {
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo"), "foo");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo bar"), "foo bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\abar"), "foo\\abar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
|
||||
t.deepEqual(
|
||||
actionsUtil.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"),
|
||||
"\\a\\b\\f\\n\\r\\t\\v",
|
||||
);
|
||||
});
|
||||
|
||||
test("decodeGitFilePath quoted strings", async (t) => {
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo"'), "foo");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo bar"'), "foo bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
|
||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
|
||||
t.deepEqual(
|
||||
actionsUtil.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'),
|
||||
"\x07\b\f\n\r\t\v",
|
||||
);
|
||||
});
|
||||
|
||||
@@ -49,382 +49,6 @@ export function getTemporaryDirectory(): string {
|
||||
: getRequiredEnvParam("RUNNER_TEMP");
|
||||
}
|
||||
|
||||
async function runGitCommand(
|
||||
checkoutPath: string | undefined,
|
||||
args: string[],
|
||||
customErrorMessage: string,
|
||||
): Promise<string> {
|
||||
let stdout = "";
|
||||
let stderr = "";
|
||||
core.debug(`Running git command: git ${args.join(" ")}`);
|
||||
try {
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
|
||||
silent: true,
|
||||
listeners: {
|
||||
stdout: (data) => {
|
||||
stdout += data.toString();
|
||||
},
|
||||
stderr: (data) => {
|
||||
stderr += data.toString();
|
||||
},
|
||||
},
|
||||
cwd: checkoutPath,
|
||||
}).exec();
|
||||
return stdout;
|
||||
} catch (error) {
|
||||
let reason = stderr;
|
||||
if (stderr.includes("not a git repository")) {
|
||||
reason =
|
||||
"The checkout path provided to the action does not appear to be a git repository.";
|
||||
}
|
||||
core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the SHA of the commit that is currently checked out.
|
||||
*/
|
||||
export const getCommitOid = async function (
|
||||
checkoutPath: string,
|
||||
ref = "HEAD",
|
||||
): Promise<string> {
|
||||
// Try to use git to get the current commit SHA. If that fails then
|
||||
// log but otherwise silently fall back to using the SHA from the environment.
|
||||
// The only time these two values will differ is during analysis of a PR when
|
||||
// the workflow has changed the current commit to the head commit instead of
|
||||
// the merge commit, which must mean that git is available.
|
||||
// Even if this does go wrong, it's not a huge problem for the alerts to be
// reported on the merge commit.
|
||||
try {
|
||||
const stdout = await runGitCommand(
|
||||
checkoutPath,
|
||||
["rev-parse", ref],
|
||||
"Continuing with commit SHA from user input or environment.",
|
||||
);
|
||||
return stdout.trim();
|
||||
} catch {
|
||||
return getOptionalInput("sha") || getRequiredEnvParam("GITHUB_SHA");
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* If the action was triggered by a pull request, determine the commit sha at
|
||||
* the head of the base branch, using the merge commit that this workflow analyzes.
|
||||
* Returns undefined if run by other triggers or the base branch commit cannot be
|
||||
* determined.
|
||||
*/
|
||||
export const determineBaseBranchHeadCommitOid = async function (
|
||||
checkoutPathOverride?: string,
|
||||
): Promise<string | undefined> {
|
||||
if (getWorkflowEventName() !== "pull_request") {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const mergeSha = getRequiredEnvParam("GITHUB_SHA");
|
||||
const checkoutPath =
|
||||
checkoutPathOverride ?? getOptionalInput("checkout_path");
|
||||
|
||||
try {
|
||||
let commitOid = "";
|
||||
let baseOid = "";
|
||||
let headOid = "";
|
||||
|
||||
const stdout = await runGitCommand(
|
||||
checkoutPath,
|
||||
["show", "-s", "--format=raw", mergeSha],
|
||||
"Will calculate the base branch SHA on the server.",
|
||||
);
|
||||
|
||||
for (const data of stdout.split("\n")) {
|
||||
if (data.startsWith("commit ") && commitOid === "") {
|
||||
commitOid = data.substring(7);
|
||||
} else if (data.startsWith("parent ")) {
|
||||
if (baseOid === "") {
|
||||
baseOid = data.substring(7);
|
||||
} else if (headOid === "") {
|
||||
headOid = data.substring(7);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||
if (
|
||||
commitOid === mergeSha &&
|
||||
headOid.length === 40 &&
|
||||
baseOid.length === 40
|
||||
) {
|
||||
return baseOid;
|
||||
}
|
||||
return undefined;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Deepen the git history of HEAD by one level. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
export const deepenGitHistory = async function () {
|
||||
try {
|
||||
await runGitCommand(
|
||||
getOptionalInput("checkout_path"),
|
||||
[
|
||||
"fetch",
|
||||
"origin",
|
||||
"HEAD",
|
||||
"--no-tags",
|
||||
"--no-recurse-submodules",
|
||||
"--deepen=1",
|
||||
],
|
||||
"Cannot deepen the shallow repository.",
|
||||
);
|
||||
} catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Fetch the given remote branch. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
export const gitFetch = async function (branch: string, extraFlags: string[]) {
|
||||
try {
|
||||
await runGitCommand(
|
||||
getOptionalInput("checkout_path"),
|
||||
["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`],
|
||||
`Cannot fetch ${branch}.`,
|
||||
);
|
||||
} catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Repack the git repository, using the given flags. Errors are logged.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
export const gitRepack = async function (flags: string[]) {
|
||||
try {
|
||||
await runGitCommand(
|
||||
getOptionalInput("checkout_path"),
|
||||
["repack", ...flags],
|
||||
"Cannot repack the repository.",
|
||||
);
|
||||
} catch {
|
||||
// Errors are already logged by runGitCommand()
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute all the merge bases between the given refs. Returns an empty array
|
||||
* if no merge base is found, or if there is an error.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*/
|
||||
export const getAllGitMergeBases = async function (
|
||||
refs: string[],
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
const stdout = await runGitCommand(
|
||||
getOptionalInput("checkout_path"),
|
||||
["merge-base", "--all", ...refs],
|
||||
`Cannot get merge base of ${refs}.`,
|
||||
);
|
||||
return stdout.trim().split("\n");
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the diff hunk headers between the two given refs.
|
||||
*
|
||||
* This function uses the `checkout_path` to determine the repository path and
|
||||
* works only when called from `analyze` or `upload-sarif`.
|
||||
*
|
||||
* @returns an array of diff hunk headers (one element per line), or undefined
|
||||
* if the action was not triggered by a pull request, or if the diff could not
|
||||
* be determined.
|
||||
*/
|
||||
export const getGitDiffHunkHeaders = async function (
|
||||
fromRef: string,
|
||||
toRef: string,
|
||||
): Promise<string[] | undefined> {
|
||||
let stdout = "";
|
||||
try {
|
||||
stdout = await runGitCommand(
|
||||
getOptionalInput("checkout_path"),
|
||||
[
|
||||
"-c",
|
||||
"core.quotePath=false",
|
||||
"diff",
|
||||
"--no-renames",
|
||||
"--irreversible-delete",
|
||||
"-U0",
|
||||
fromRef,
|
||||
toRef,
|
||||
],
|
||||
`Cannot get diff from ${fromRef} to ${toRef}.`,
|
||||
);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const headers: string[] = [];
|
||||
for (const line of stdout.split("\n")) {
|
||||
if (
|
||||
line.startsWith("--- ") ||
|
||||
line.startsWith("+++ ") ||
|
||||
line.startsWith("@@ ")
|
||||
) {
|
||||
headers.push(line);
|
||||
}
|
||||
}
|
||||
return headers;
|
||||
};
|
||||
|
||||
/**
|
||||
* Decode, if necessary, a file path produced by Git. See
|
||||
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
|
||||
* for details on how Git encodes file paths with special characters.
|
||||
*
|
||||
* This function works only for Git output with `core.quotePath=false`.
|
||||
*/
|
||||
export const decodeGitFilePath = function (filePath: string): string {
|
||||
if (filePath.startsWith('"') && filePath.endsWith('"')) {
|
||||
filePath = filePath.substring(1, filePath.length - 1);
|
||||
return filePath.replace(
|
||||
/\\([abfnrtv\\"]|[0-7]{1,3})/g,
|
||||
(_match, seq: string) => {
|
||||
switch (seq[0]) {
|
||||
case "a":
|
||||
return "\x07";
|
||||
case "b":
|
||||
return "\b";
|
||||
case "f":
|
||||
return "\f";
|
||||
case "n":
|
||||
return "\n";
|
||||
case "r":
|
||||
return "\r";
|
||||
case "t":
|
||||
return "\t";
|
||||
case "v":
|
||||
return "\v";
|
||||
case "\\":
|
||||
return "\\";
|
||||
case '"':
|
||||
return '"';
|
||||
default:
|
||||
// Both String.fromCharCode() and String.fromCodePoint() work only
|
||||
// for constructing an entire character at once. If a Unicode
|
||||
// character is encoded as a sequence of escaped bytes, calling these
|
||||
// methods sequentially on the individual byte values would *not*
|
||||
// produce the original multi-byte Unicode character. As a result,
|
||||
// this implementation works only with the Git option core.quotePath
|
||||
// set to false.
|
||||
return String.fromCharCode(parseInt(seq, 8));
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
return filePath;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the ref currently being analyzed.
|
||||
*/
|
||||
export async function getRef(): Promise<string> {
|
||||
// Will be in the form "refs/heads/master" on a push event
|
||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||
const refInput = getOptionalInput("ref");
|
||||
const shaInput = getOptionalInput("sha");
|
||||
const checkoutPath =
|
||||
getOptionalInput("checkout_path") ||
|
||||
getOptionalInput("source-root") ||
|
||||
getRequiredEnvParam("GITHUB_WORKSPACE");
|
||||
|
||||
const hasRefInput = !!refInput;
|
||||
const hasShaInput = !!shaInput;
|
||||
// If one of 'ref' or 'sha' is provided, both are required
|
||||
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
||||
throw new ConfigurationError(
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
);
|
||||
}
|
||||
|
||||
const ref = refInput || getRefFromEnv();
|
||||
const sha = shaInput || getRequiredEnvParam("GITHUB_SHA");
|
||||
|
||||
// If the ref is a user-provided input, we have to skip logic
|
||||
// and assume that it is really where they want to upload the results.
|
||||
if (refInput) {
|
||||
return refInput;
|
||||
}
|
||||
|
||||
// For pull request refs we want to detect whether the workflow
|
||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||
// than the 'merge' ref. If so, we want to convert the ref that
|
||||
// we report back.
|
||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||
if (!pull_ref_regex.test(ref)) {
|
||||
return ref;
|
||||
}
|
||||
|
||||
const head = await getCommitOid(checkoutPath, "HEAD");
|
||||
|
||||
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||
// using GITHUB_REF. There is a subtle race condition where
|
||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||
const hasChangedRef =
|
||||
sha !== head &&
|
||||
(await getCommitOid(
|
||||
checkoutPath,
|
||||
ref.replace(/^refs\/pull\//, "refs/remotes/pull/"),
|
||||
)) !== head;
|
||||
|
||||
if (hasChangedRef) {
|
||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||
core.debug(
|
||||
`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`,
|
||||
);
|
||||
return newRef;
|
||||
} else {
|
||||
return ref;
|
||||
}
|
||||
}
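For illustration only (this sketch is not part of the diff): the merge-to-head rewrite performed at the end of getRef() boils down to a single regular-expression replacement. The ref value below is hypothetical.
// Standalone sketch of the rewrite above; "refs/pull/123/merge" is a made-up example ref.
const pullRefRegex = /refs\/pull\/(\d+)\/merge/;
const mergeRef = "refs/pull/123/merge";
const headRef = mergeRef.replace(pullRefRegex, "refs/pull/$1/head"); // => "refs/pull/123/head"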
|
||||
|
||||
function getRefFromEnv(): string {
|
||||
// To work around a limitation of Actions dynamic workflows not setting
|
||||
// the GITHUB_REF in some cases, we accept also the ref within the
|
||||
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
|
||||
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
|
||||
let refEnv: string;
|
||||
try {
|
||||
refEnv = getRequiredEnvParam("GITHUB_REF");
|
||||
} catch (e) {
|
||||
// If the GITHUB_REF is not set, we try to rescue by getting the
|
||||
// CODE_SCANNING_REF.
|
||||
const maybeRef = process.env["CODE_SCANNING_REF"];
|
||||
if (maybeRef === undefined || maybeRef.length === 0) {
|
||||
throw e;
|
||||
}
|
||||
refEnv = maybeRef;
|
||||
}
|
||||
return refEnv;
|
||||
}
|
||||
|
||||
export function getActionVersion(): string {
|
||||
return pkg.version!;
|
||||
}
|
||||
@@ -472,36 +96,6 @@ export function getWorkflowEvent(): any {
|
||||
}
|
||||
}
|
||||
|
||||
function removeRefsHeadsPrefix(ref: string): string {
|
||||
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether we are analyzing the default branch for the repository.
|
||||
*
|
||||
* This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
|
||||
* environment variable can be set in cases where repository information might not be available, for
|
||||
* example dynamic workflows.
|
||||
*/
|
||||
export async function isAnalyzingDefaultBranch(): Promise<boolean> {
|
||||
if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Get the current ref and trim the refs/heads/ prefix
|
||||
let currentRef = await getRef();
|
||||
currentRef = removeRefsHeadsPrefix(currentRef);
|
||||
|
||||
const event = getWorkflowEvent();
|
||||
let defaultBranch = event?.repository?.default_branch;
|
||||
|
||||
if (getWorkflowEventName() === "schedule") {
|
||||
defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
|
||||
}
|
||||
|
||||
return currentRef === defaultBranch;
|
||||
}
|
||||
|
||||
export async function printDebugLogs(config: Config) {
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = getCodeQLDatabasePath(config, language);
|
||||
|
||||
@@ -5,6 +5,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as api from "./api-client";
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import * as statusReport from "./status-report";
|
||||
import {
|
||||
setupTests,
|
||||
@@ -31,7 +32,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
|
||||
.stub(statusReport, "createStatusReportBase")
|
||||
.resolves({} as statusReport.StatusReportBase);
|
||||
sinon.stub(statusReport, "sendStatusReport").resolves();
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
|
||||
@@ -5,6 +5,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as api from "./api-client";
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import * as statusReport from "./status-report";
|
||||
import {
|
||||
setupTests,
|
||||
@@ -47,7 +48,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||
optionalInputStub.withArgs("expect-error").returns("false");
|
||||
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
mockFeatureFlagApiEndpoint(200, {});
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ import * as configUtils from "./config-utils";
|
||||
import { addDiagnostic, makeDiagnostic } from "./diagnostics";
|
||||
import { EnvVar } from "./environment";
|
||||
import { FeatureEnablement, Feature } from "./feature-flags";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { isScannedLanguage, Language } from "./languages";
|
||||
import { Logger, withGroupAsync } from "./logging";
|
||||
import { DatabaseCreationTimings, EventReport } from "./status-report";
|
||||
@@ -304,33 +305,33 @@ async function getPullRequestEditedDiffRanges(
|
||||
// Step 1: Deepen from the PR merge commit to the base branch head and the PR
|
||||
// topic branch head, so that the PR merge commit is no longer considered a
|
||||
// grafted commit.
|
||||
await actionsUtil.deepenGitHistory();
|
||||
await gitUtils.deepenGitHistory();
|
||||
// Step 2: Fetch the base branch shallow history. This step ensures that the
|
||||
// base branch name is present in the local repository. Normally the base
|
||||
// branch name would be added by Step 4. However, if the base branch head is
|
||||
// an ancestor of the PR topic branch head, Step 4 would fail without doing
|
||||
// anything, so we need to fetch the base branch explicitly.
|
||||
await actionsUtil.gitFetch(baseRef, ["--depth=1"]);
|
||||
await gitUtils.gitFetch(baseRef, ["--depth=1"]);
|
||||
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
|
||||
// that are reachable from the base branch head.
|
||||
await actionsUtil.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
|
||||
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
|
||||
// Step 4: Fetch the base branch history, stopping when we reach commits that
|
||||
// are reachable from the PR topic branch head.
|
||||
await actionsUtil.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
|
||||
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
|
||||
// Step 5: Repack the history to remove the shallow grafts that were added by
|
||||
// the previous fetches. This step works around a bug that causes subsequent
|
||||
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
|
||||
// See https://stackoverflow.com/q/63878612
|
||||
await actionsUtil.gitRepack(["-d"]);
|
||||
await gitUtils.gitRepack(["-d"]);
|
||||
// Step 6: Deepen the history so that we have the merge bases between the base
|
||||
// branch and the PR topic branch.
|
||||
await actionsUtil.deepenGitHistory();
|
||||
await gitUtils.deepenGitHistory();
|
||||
|
||||
// To compute the exact same diff as GitHub would compute for the PR, we need
|
||||
// to use the same merge base as GitHub. That is easy to do if there is only
|
||||
// one merge base, which is by far the most common case. If there are multiple
|
||||
// merge bases, we stop without producing a diff range.
|
||||
const mergeBases = await actionsUtil.getAllGitMergeBases([baseRef, headRef]);
|
||||
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
|
||||
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
|
||||
if (mergeBases.length !== 1) {
|
||||
logger.info(
|
||||
@@ -340,7 +341,7 @@ async function getPullRequestEditedDiffRanges(
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const diffHunkHeaders = await actionsUtil.getGitDiffHunkHeaders(
|
||||
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(
|
||||
mergeBases[0],
|
||||
headRef,
|
||||
);
|
||||
@@ -353,7 +354,7 @@ async function getPullRequestEditedDiffRanges(
|
||||
let changedFile = "";
|
||||
for (const line of diffHunkHeaders) {
|
||||
if (line.startsWith("+++ ")) {
|
||||
const filePath = actionsUtil.decodeGitFilePath(line.substring(4));
|
||||
const filePath = gitUtils.decodeGitFilePath(line.substring(4));
|
||||
if (filePath.startsWith("b/")) {
|
||||
// The file was edited: track all hunks in the file
|
||||
changedFile = filePath.substring(2);
|
||||
|
||||
@@ -10,7 +10,6 @@ import {
|
||||
CommandInvocationError,
|
||||
getActionVersion,
|
||||
getOptionalInput,
|
||||
isAnalyzingDefaultBranch,
|
||||
runTool,
|
||||
} from "./actions-util";
|
||||
import * as api from "./api-client";
|
||||
@@ -24,6 +23,7 @@ import {
|
||||
Feature,
|
||||
FeatureEnablement,
|
||||
} from "./feature-flags";
|
||||
import { isAnalyzingDefaultBranch } from "./git-utils";
|
||||
import { Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import * as setupCodeql from "./setup-codeql";
|
||||
|
||||
@@ -10,6 +10,7 @@ import * as apiClient from "./api-client";
|
||||
import { setCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { uploadDatabases } from "./database-upload";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { Language } from "./languages";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import {
|
||||
@@ -75,7 +76,7 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("false");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
const loggedMessages = [];
|
||||
await uploadDatabases(
|
||||
@@ -102,7 +103,7 @@ test("Abort database upload if running against GHES", async (t) => {
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
const config = getTestConfig(tmpDir);
|
||||
config.gitHubVersion = { type: GitHubVariant.GHES, version: "3.0" };
|
||||
@@ -132,7 +133,7 @@ test("Abort database upload if not analyzing default branch", async (t) => {
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
|
||||
|
||||
const loggedMessages = [];
|
||||
await uploadDatabases(
|
||||
@@ -158,7 +159,7 @@ test("Don't crash if uploading a database fails", async (t) => {
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
await mockHttpRequests(500);
|
||||
|
||||
@@ -194,7 +195,7 @@ test("Successfully uploading a database to github.com", async (t) => {
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
await mockHttpRequests(201);
|
||||
|
||||
@@ -228,7 +229,7 @@ test("Successfully uploading a database to GHEC-DR", async (t) => {
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
const databaseUploadSpy = await mockHttpRequests(201);
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import { getApiClient, GitHubApiDetails } from "./api-client";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { Logger } from "./logging";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import * as util from "./util";
|
||||
@@ -34,7 +35,7 @@ export async function uploadDatabases(
|
||||
return;
|
||||
}
|
||||
|
||||
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
|
||||
if (!(await gitUtils.isAnalyzingDefaultBranch())) {
|
||||
// We only want to upload a database if we are analyzing the default branch.
|
||||
logger.debug("Not analyzing default branch. Skipping upload.");
|
||||
return;
|
||||
@@ -62,7 +63,7 @@ export async function uploadDatabases(
|
||||
const bundledDb = await bundleDb(config, language, codeql, language);
|
||||
const bundledDbSize = fs.statSync(bundledDb).size;
|
||||
const bundledDbReadStream = fs.createReadStream(bundledDb);
|
||||
const commitOid = await actionsUtil.getCommitOid(
|
||||
const commitOid = await gitUtils.getCommitOid(
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
);
|
||||
try {
|
||||
|
||||
src/git-utils.test.ts (new file, 307 lines)
@@ -0,0 +1,307 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||
import { withTmpDir } from "./util";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
test("getRef() throws on the empty string", async (t) => {
|
||||
process.env["GITHUB_REF"] = "";
|
||||
await t.throwsAsync(gitUtils.getRef);
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const callback = sinon.stub(gitUtils, "getCommitOid");
|
||||
callback.withArgs("HEAD").resolves(currentSha);
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||
const sha = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(gitUtils, "getCommitOid");
|
||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||
callback.withArgs("HEAD").resolves(sha);
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(gitUtils, "getCommitOid");
|
||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||
callback.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
||||
|
||||
// These values are ignored
|
||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
|
||||
const callback = sinon.stub(gitUtils, "getCommitOid");
|
||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
||||
callback.restore();
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/HEAD";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = expectedRef;
|
||||
process.env["GITHUB_REF"] = "";
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const expectedRef = "refs/pull/1/merge";
|
||||
const currentSha = "a".repeat(40);
|
||||
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
|
||||
process.env["GITHUB_REF"] = expectedRef;
|
||||
process.env["GITHUB_SHA"] = currentSha;
|
||||
|
||||
const actualRef = await gitUtils.getRef();
|
||||
t.deepEqual(actualRef, expectedRef);
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
||||
|
||||
await t.throwsAsync(
|
||||
async () => {
|
||||
await gitUtils.getRef();
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
},
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
||||
|
||||
await t.throwsAsync(
|
||||
async () => {
|
||||
await gitUtils.getRef();
|
||||
},
|
||||
{
|
||||
instanceOf: Error,
|
||||
message:
|
||||
"Both 'ref' and 'sha' are required if one of them is provided.",
|
||||
},
|
||||
);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("isAnalyzingDefaultBranch()", async (t) => {
|
||||
process.env["GITHUB_EVENT_NAME"] = "push";
|
||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
|
||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const envFile = path.join(tmpDir, "event.json");
|
||||
fs.writeFileSync(
|
||||
envFile,
|
||||
JSON.stringify({
|
||||
repository: {
|
||||
default_branch: "main",
|
||||
},
|
||||
}),
|
||||
);
|
||||
process.env["GITHUB_EVENT_PATH"] = envFile;
|
||||
|
||||
process.env["GITHUB_REF"] = "main";
|
||||
process.env["GITHUB_SHA"] = "1234";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
process.env["GITHUB_REF"] = "feature";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
|
||||
|
||||
fs.writeFileSync(
|
||||
envFile,
|
||||
JSON.stringify({
|
||||
schedule: "0 0 * * *",
|
||||
}),
|
||||
);
|
||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
|
||||
|
||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
getAdditionalInputStub
|
||||
.withArgs("ref")
|
||||
.resolves("refs/heads/something-else");
|
||||
getAdditionalInputStub
|
||||
.withArgs("sha")
|
||||
.resolves("0000000000000000000000000000000000000000");
|
||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "hucairz";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
const result = await gitUtils.determineBaseBranchHeadCommitOid(__dirname);
|
||||
t.deepEqual(result, undefined);
|
||||
t.deepEqual(0, infoStub.callCount);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid not git repository", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
await gitUtils.determineBaseBranchHeadCommitOid(tmpDir);
|
||||
});
|
||||
|
||||
t.deepEqual(1, infoStub.callCount);
|
||||
t.deepEqual(
|
||||
infoStub.firstCall.args[0],
|
||||
"git call failed. Will calculate the base branch SHA on the server. Error: " +
|
||||
"The checkout path provided to the action does not appear to be a git repository.",
|
||||
);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("determineBaseBranchHeadCommitOid other error", async (t) => {
|
||||
const infoStub = sinon.stub(core, "info");
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
||||
const result = await gitUtils.determineBaseBranchHeadCommitOid(
|
||||
path.join(__dirname, "../../i-dont-exist"),
|
||||
);
|
||||
t.deepEqual(result, undefined);
|
||||
t.deepEqual(1, infoStub.callCount);
|
||||
t.assert(
|
||||
infoStub.firstCall.args[0].startsWith(
|
||||
"git call failed. Will calculate the base branch SHA on the server. Error: ",
|
||||
),
|
||||
);
|
||||
t.assert(
|
||||
!infoStub.firstCall.args[0].endsWith(
|
||||
"The checkout path provided to the action does not appear to be a git repository.",
|
||||
),
|
||||
);
|
||||
|
||||
infoStub.restore();
|
||||
});
|
||||
|
||||
test("decodeGitFilePath unquoted strings", async (t) => {
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo"), "foo");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo bar"), "foo bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\abar"), "foo\\abar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
|
||||
t.deepEqual(
|
||||
gitUtils.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"),
|
||||
"\\a\\b\\f\\n\\r\\t\\v",
|
||||
);
|
||||
});
|
||||
|
||||
test("decodeGitFilePath quoted strings", async (t) => {
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo"'), "foo");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo bar"'), "foo bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
|
||||
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
|
||||
t.deepEqual(
|
||||
gitUtils.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'),
|
||||
"\x07\b\f\n\r\t\v",
|
||||
);
|
||||
});
|
||||
src/git-utils.ts (new file, 416 lines)
@@ -0,0 +1,416 @@
import * as core from "@actions/core";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as safeWhich from "@chrisgavin/safe-which";

import {
  getOptionalInput,
  getWorkflowEvent,
  getWorkflowEventName,
} from "./actions-util";
import { ConfigurationError, getRequiredEnvParam } from "./util";

async function runGitCommand(
  checkoutPath: string | undefined,
  args: string[],
  customErrorMessage: string,
): Promise<string> {
  let stdout = "";
  let stderr = "";
  core.debug(`Running git command: git ${args.join(" ")}`);
  try {
    await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
      silent: true,
      listeners: {
        stdout: (data) => {
          stdout += data.toString();
        },
        stderr: (data) => {
          stderr += data.toString();
        },
      },
      cwd: checkoutPath,
    }).exec();
    return stdout;
  } catch (error) {
    let reason = stderr;
    if (stderr.includes("not a git repository")) {
      reason =
        "The checkout path provided to the action does not appear to be a git repository.";
    }
    core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
    throw error;
  }
}

/**
 * Gets the SHA of the commit that is currently checked out.
 */
export const getCommitOid = async function (
  checkoutPath: string,
  ref = "HEAD",
): Promise<string> {
  // Try to use git to get the current commit SHA. If that fails then
  // log but otherwise silently fall back to using the SHA from the environment.
  // The only time these two values will differ is during analysis of a PR when
  // the workflow has changed the current commit to the head commit instead of
  // the merge commit, which must mean that git is available.
  // Even if this does go wrong, it's not a huge problem for the alerts to be
  // reported on the merge commit.
  try {
    const stdout = await runGitCommand(
      checkoutPath,
      ["rev-parse", ref],
      "Continuing with commit SHA from user input or environment.",
    );
    return stdout.trim();
  } catch {
    return getOptionalInput("sha") || getRequiredEnvParam("GITHUB_SHA");
  }
};
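A minimal usage sketch (illustrative only, not part of the new file; it assumes the standard GITHUB_WORKSPACE environment variable is available):
// Hypothetical caller: log the commit that is currently checked out.
async function logCheckedOutCommit(): Promise<void> {
  const checkoutPath = process.env["GITHUB_WORKSPACE"] || ".";
  core.info(`Checked-out commit: ${await getCommitOid(checkoutPath, "HEAD")}`);
}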

/**
 * If the action was triggered by a pull request, determine the commit sha at
 * the head of the base branch, using the merge commit that this workflow analyzes.
 * Returns undefined if run by other triggers or the base branch commit cannot be
 * determined.
 */
export const determineBaseBranchHeadCommitOid = async function (
  checkoutPathOverride?: string,
): Promise<string | undefined> {
  if (getWorkflowEventName() !== "pull_request") {
    return undefined;
  }

  const mergeSha = getRequiredEnvParam("GITHUB_SHA");
  const checkoutPath =
    checkoutPathOverride ?? getOptionalInput("checkout_path");

  try {
    let commitOid = "";
    let baseOid = "";
    let headOid = "";

    const stdout = await runGitCommand(
      checkoutPath,
      ["show", "-s", "--format=raw", mergeSha],
      "Will calculate the base branch SHA on the server.",
    );

    for (const data of stdout.split("\n")) {
      if (data.startsWith("commit ") && commitOid === "") {
        commitOid = data.substring(7);
      } else if (data.startsWith("parent ")) {
        if (baseOid === "") {
          baseOid = data.substring(7);
        } else if (headOid === "") {
          headOid = data.substring(7);
        }
      }
    }

    // Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
    if (
      commitOid === mergeSha &&
      headOid.length === 40 &&
      baseOid.length === 40
    ) {
      return baseOid;
    }
    return undefined;
  } catch {
    return undefined;
  }
};
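For context (illustrative, with placeholder SHAs): the parser above expects `git show -s --format=raw <merge-sha>` to produce output of roughly this shape for a pull request merge commit, where the first parent line is the base branch head that gets returned and the second is the PR topic branch head:
// commit 1111111111111111111111111111111111111111
// tree 2222222222222222222222222222222222222222
// parent 3333333333333333333333333333333333333333   <-- baseOid (returned)
// parent 4444444444444444444444444444444444444444   <-- headOid
// author Jane Doe <jane@example.com> 1700000000 +0000
// committer GitHub <noreply@github.com> 1700000000 +0000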

/**
 * Deepen the git history of HEAD by one level. Errors are logged.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
export const deepenGitHistory = async function () {
  try {
    await runGitCommand(
      getOptionalInput("checkout_path"),
      [
        "fetch",
        "origin",
        "HEAD",
        "--no-tags",
        "--no-recurse-submodules",
        "--deepen=1",
      ],
      "Cannot deepen the shallow repository.",
    );
  } catch {
    // Errors are already logged by runGitCommand()
  }
};

/**
 * Fetch the given remote branch. Errors are logged.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
export const gitFetch = async function (branch: string, extraFlags: string[]) {
  try {
    await runGitCommand(
      getOptionalInput("checkout_path"),
      ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`],
      `Cannot fetch ${branch}.`,
    );
  } catch {
    // Errors are already logged by runGitCommand()
  }
};

/**
 * Repack the git repository, using the given flags. Errors are logged.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
export const gitRepack = async function (flags: string[]) {
  try {
    await runGitCommand(
      getOptionalInput("checkout_path"),
      ["repack", ...flags],
      "Cannot repack the repository.",
    );
  } catch {
    // Errors are already logged by runGitCommand()
  }
};

/**
 * Compute all the merge bases between the given refs. Returns an empty array
 * if no merge base is found, or if there is an error.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
export const getAllGitMergeBases = async function (
  refs: string[],
): Promise<string[]> {
  try {
    const stdout = await runGitCommand(
      getOptionalInput("checkout_path"),
      ["merge-base", "--all", ...refs],
      `Cannot get merge base of ${refs}.`,
    );
    return stdout.trim().split("\n");
  } catch {
    return [];
  }
};
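
// Illustrative usage sketch (editor addition, not part of the original diff; the
// branch name and fetch depth are assumptions): a caller that wants the merge base
// between the checked-out commit and a base branch might fetch that branch first
// and then ask for all merge bases, keeping in mind that criss-cross merges can
// yield more than one.
async function exampleMergeBase(): Promise<string | undefined> {
  await gitFetch("main", ["--depth=100"]);
  const mergeBases = await getAllGitMergeBases(["HEAD", "main"]);
  return mergeBases.length > 0 ? mergeBases[0] : undefined;
}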

/**
 * Compute the diff hunk headers between the two given refs.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 *
 * @returns an array of diff hunk headers (one element per line), or undefined
 * if the action was not triggered by a pull request, or if the diff could not
 * be determined.
 */
export const getGitDiffHunkHeaders = async function (
  fromRef: string,
  toRef: string,
): Promise<string[] | undefined> {
  let stdout = "";
  try {
    stdout = await runGitCommand(
      getOptionalInput("checkout_path"),
      [
        "-c",
        "core.quotePath=false",
        "diff",
        "--no-renames",
        "--irreversible-delete",
        "-U0",
        fromRef,
        toRef,
      ],
      `Cannot get diff from ${fromRef} to ${toRef}.`,
    );
  } catch {
    return undefined;
  }

  const headers: string[] = [];
  for (const line of stdout.split("\n")) {
    if (
      line.startsWith("--- ") ||
      line.startsWith("+++ ") ||
      line.startsWith("@@ ")
    ) {
      headers.push(line);
    }
  }
  return headers;
};
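
// Illustrative sketch (editor addition; the file name and line numbers are
// invented): for a one-hunk change to a single file, the function above would
// resolve to something like
//
//   [
//     "--- a/src/app.ts",
//     "+++ b/src/app.ts",
//     "@@ -10,2 +10,3 @@ export function main() {",
//   ]
//
// i.e. one "---"/"+++" pair per changed file followed by one "@@" header per hunk.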

/**
 * Decode, if necessary, a file path produced by Git. See
 * https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
 * for details on how Git encodes file paths with special characters.
 *
 * This function works only for Git output with `core.quotePath=false`.
 */
export const decodeGitFilePath = function (filePath: string): string {
  if (filePath.startsWith('"') && filePath.endsWith('"')) {
    filePath = filePath.substring(1, filePath.length - 1);
    return filePath.replace(
      /\\([abfnrtv\\"]|[0-7]{1,3})/g,
      (_match, seq: string) => {
        switch (seq[0]) {
          case "a":
            return "\x07";
          case "b":
            return "\b";
          case "f":
            return "\f";
          case "n":
            return "\n";
          case "r":
            return "\r";
          case "t":
            return "\t";
          case "v":
            return "\v";
          case "\\":
            return "\\";
          case '"':
            return '"';
          default:
            // Both String.fromCharCode() and String.fromCodePoint() work only
            // for constructing an entire character at once. If a Unicode
            // character is encoded as a sequence of escaped bytes, calling these
            // methods sequentially on the individual byte values would *not*
            // produce the original multi-byte Unicode character. As a result,
            // this implementation works only with the Git option core.quotePath
            // set to false.
            return String.fromCharCode(parseInt(seq, 8));
        }
      },
    );
  }
  return filePath;
};
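
// Illustrative sketch (editor addition; the example paths are invented): with
// core.quotePath=false, Git quotes a path only when it contains characters such as
// a double quote, a backslash, or a control character. A file literally named
// `we "said" hi\there.txt` is printed by Git as `"we \"said\" hi\\there.txt"`, and
// decodeGitFilePath() recovers the original name; unquoted paths pass through
// untouched.
const decodedExample = decodeGitFilePath('"we \\"said\\" hi\\\\there.txt"');
// decodedExample === 'we "said" hi\\there.txt'
const unquotedExample = decodeGitFilePath("plain-path.txt");
// unquotedExample === "plain-path.txt"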

function getRefFromEnv(): string {
  // To work around a limitation of Actions dynamic workflows not setting
  // GITHUB_REF in some cases, we also accept the ref from the
  // CODE_SCANNING_REF variable. When possible, however, we prefer to use
  // GITHUB_REF as that is a protected variable and cannot be overwritten.
  let refEnv: string;
  try {
    refEnv = getRequiredEnvParam("GITHUB_REF");
  } catch (e) {
    // If GITHUB_REF is not set, we try to rescue by getting the
    // CODE_SCANNING_REF.
    const maybeRef = process.env["CODE_SCANNING_REF"];
    if (maybeRef === undefined || maybeRef.length === 0) {
      throw e;
    }
    refEnv = maybeRef;
  }
  return refEnv;
}

/**
 * Get the ref currently being analyzed.
 */
export async function getRef(): Promise<string> {
  // Will be in the form "refs/heads/master" on a push event
  // or in the form "refs/pull/N/merge" on a pull_request event
  const refInput = getOptionalInput("ref");
  const shaInput = getOptionalInput("sha");
  const checkoutPath =
    getOptionalInput("checkout_path") ||
    getOptionalInput("source-root") ||
    getRequiredEnvParam("GITHUB_WORKSPACE");

  const hasRefInput = !!refInput;
  const hasShaInput = !!shaInput;
  // If one of 'ref' or 'sha' is provided, both are required
  if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
    throw new ConfigurationError(
      "Both 'ref' and 'sha' are required if one of them is provided.",
    );
  }

  const ref = refInput || getRefFromEnv();
  const sha = shaInput || getRequiredEnvParam("GITHUB_SHA");

  // If the ref is a user-provided input, we have to skip the remaining logic
  // and assume that it is really where they want to upload the results.
  if (refInput) {
    return refInput;
  }

  // For pull request refs we want to detect whether the workflow
  // has run `git checkout HEAD^2` to analyze the 'head' ref rather
  // than the 'merge' ref. If so, we want to convert the ref that
  // we report back.
  const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
  if (!pull_ref_regex.test(ref)) {
    return ref;
  }

  const head = await getCommitOid(checkoutPath, "HEAD");

  // In actions/checkout@v2+ we can check whether git rev-parse HEAD == GITHUB_SHA.
  // In actions/checkout@v1 this may not be true, as it checks out the repository
  // using GITHUB_REF. There is a subtle race condition where
  // git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
  // git rev-parse GITHUB_REF == git rev-parse HEAD instead.
  const hasChangedRef =
    sha !== head &&
    (await getCommitOid(
      checkoutPath,
      ref.replace(/^refs\/pull\//, "refs/remotes/pull/"),
    )) !== head;

  if (hasChangedRef) {
    const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
    core.debug(
      `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`,
    );
    return newRef;
  } else {
    return ref;
  }
}
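
// Illustrative sketch (editor addition; the pull request number is invented): the
// merge-to-head rewrite above is a plain regex replacement, e.g.
const exampleRewrittenRef = "refs/pull/123/merge".replace(
  /refs\/pull\/(\d+)\/merge/,
  "refs/pull/$1/head",
);
// exampleRewrittenRef === "refs/pull/123/head"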

function removeRefsHeadsPrefix(ref: string): string {
  return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
}

/**
 * Returns whether we are analyzing the default branch for the repository.
 *
 * This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
 * environment variable can be set in cases where repository information might not be available, for
 * example dynamic workflows.
 */
export async function isAnalyzingDefaultBranch(): Promise<boolean> {
  if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
    return true;
  }

  // Get the current ref and trim any refs/heads/ prefix
  let currentRef = await getRef();
  currentRef = removeRefsHeadsPrefix(currentRef);

  const event = getWorkflowEvent();
  let defaultBranch = event?.repository?.default_branch;

  if (getWorkflowEventName() === "schedule") {
    defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
  }

  return currentRef === defaultBranch;
}
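
// Illustrative sketch (editor addition): a dynamic workflow that has no repository
// information available can force the default-branch code path through the
// environment variable checked above, e.g.
//
//   process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH = "true";
//   await isAnalyzingDefaultBranch(); // resolves to true without consulting getRef()
//
// Otherwise the comparison is between the trimmed current ref (for example "main"
// from "refs/heads/main") and the default_branch reported in the workflow event.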

@@ -5,7 +5,6 @@ import * as core from "@actions/core";
import {
  getWorkflowEventName,
  getOptionalInput,
  getRef,
  getWorkflowRunID,
  getWorkflowRunAttempt,
  getActionVersion,
@@ -16,6 +15,7 @@ import { getAnalysisKey, getApiClient } from "./api-client";
import { type Config } from "./config-utils";
import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import { getRef } from "./git-utils";
import { Logger } from "./logging";
import {
  ConfigurationError,

@@ -14,6 +14,7 @@ import {
} from "./codeql";
import * as configUtils from "./config-utils";
import { Feature } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import {
@@ -96,7 +97,7 @@ function getTestConfigWithTempDir(tempDir: string): configUtils.Config {
test("check flags for JS, analyzing default branch", async (t) => {
  await util.withTmpDir(async (tmpDir) => {
    const config = getTestConfigWithTempDir(tmpDir);
    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
    const result = await getTrapCachingExtractorConfigArgsForLang(
      config,
      Language.javascript,
@@ -112,7 +113,7 @@ test("check flags for JS, analyzing default branch", async (t) => {
test("check flags for all, not analyzing default branch", async (t) => {
  await util.withTmpDir(async (tmpDir) => {
    const config = getTestConfigWithTempDir(tmpDir);
    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
    const result = await getTrapCachingExtractorConfigArgs(config);
    t.deepEqual(result, [
      `-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
@@ -139,7 +140,7 @@ test("get languages that support TRAP caching", async (t) => {
test("upload cache key contains right fields", async (t) => {
  const loggedMessages = [];
  const logger = getRecordingLogger(loggedMessages);
  sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
  sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
  sinon.stub(util, "tryGetFolderBytes").resolves(999_999_999);
  const stubSave = sinon.stub(cache, "saveCache");
  process.env.GITHUB_SHA = "somesha";
@@ -160,7 +161,7 @@ test("download cache looks for the right key and creates dir", async (t) => {
    const loggedMessages = [];
    const logger = getRecordingLogger(loggedMessages);
    sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir);
    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
    const stubRestore = sinon.stub(cache, "restoreCache").resolves("found");
    const eventFile = path.resolve(tmpDir, "event.json");
    process.env.GITHUB_EVENT_NAME = "pull_request";
@@ -200,8 +201,8 @@ test("cleanup removes only old CodeQL TRAP caches", async (t) => {
    // This config specifies that we are analyzing JavaScript and Ruby, but not Swift.
    const config = getTestConfigWithTempDir(tmpDir);

    sinon.stub(actionsUtil, "getRef").resolves("refs/heads/main");
    sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
    sinon.stub(gitUtils, "getRef").resolves("refs/heads/main");
    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
    const listStub = sinon.stub(apiClient, "listActionsCaches").resolves([
      // Should be kept, since it's not relevant to CodeQL. In reality, the API shouldn't return
      // this in the first place, but this is a defensive check.

@@ -9,6 +9,7 @@ import { CodeQL } from "./codeql";
import type { Config } from "./config-utils";
import { DocUrl } from "./doc-url";
import { Feature, FeatureEnablement } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { Language } from "./languages";
import { Logger } from "./logging";
import {
@@ -71,7 +72,7 @@ export async function downloadTrapCaches(
    result[language] = cacheDir;
  }

  if (await actionsUtil.isAnalyzingDefaultBranch()) {
  if (await gitUtils.isAnalyzingDefaultBranch()) {
    logger.info(
      "Analyzing default branch. Skipping downloading of TRAP caches.",
    );
@@ -131,7 +132,7 @@ export async function uploadTrapCaches(
  config: Config,
  logger: Logger,
): Promise<boolean> {
  if (!(await actionsUtil.isAnalyzingDefaultBranch())) return false; // Only upload caches from the default branch
  if (!(await gitUtils.isAnalyzingDefaultBranch())) return false; // Only upload caches from the default branch

  for (const language of config.languages) {
    const cacheDir = config.trapCaches[language];
@@ -184,7 +185,7 @@ export async function cleanupTrapCaches(
      trap_cache_cleanup_skipped_because: "feature disabled",
    };
  }
  if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
  if (!(await gitUtils.isAnalyzingDefaultBranch())) {
    return {
      trap_cache_cleanup_skipped_because: "not analyzing default branch",
    };
@@ -195,7 +196,7 @@ export async function cleanupTrapCaches(

  const allCaches = await apiClient.listActionsCaches(
    CODEQL_TRAP_CACHE_PREFIX,
    await actionsUtil.getRef(),
    await gitUtils.getRef(),
  );

  for (const language of config.languages) {

@@ -17,6 +17,7 @@ import { getConfig } from "./config-utils";
import { EnvVar } from "./environment";
import { FeatureEnablement } from "./feature-flags";
import * as fingerprints from "./fingerprints";
import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { parseRepositoryNwo, RepositoryNwo } from "./repository";
@@ -599,8 +600,8 @@ export async function uploadFiles(
  const checkoutURI = fileUrl(checkoutPath);

  const payload = buildPayload(
    await actionsUtil.getCommitOid(checkoutPath),
    await actionsUtil.getRef(),
    await gitUtils.getCommitOid(checkoutPath),
    await gitUtils.getRef(),
    analysisKey,
    util.getRequiredEnvParam("GITHUB_WORKFLOW"),
    zippedSarif,
@@ -609,7 +610,7 @@ export async function uploadFiles(
    checkoutURI,
    environment,
    toolNames,
    await actionsUtil.determineBaseBranchHeadCommitOid(),
    await gitUtils.determineBaseBranchHeadCommitOid(),
  );

  // Log some useful debug info