mirror of
https://github.com/github/codeql-action.git
synced 2025-12-24 08:10:06 +08:00
build: refresh js files
This commit is contained in:
341
lib/actions-util.js
generated
341
lib/actions-util.js
generated
@@ -33,15 +33,13 @@ var __importStar = (this && this.__importStar) || (function () {
|
|||||||
};
|
};
|
||||||
})();
|
})();
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||||
exports.getTemporaryDirectory = getTemporaryDirectory;
|
exports.getTemporaryDirectory = getTemporaryDirectory;
|
||||||
exports.getRef = getRef;
|
|
||||||
exports.getActionVersion = getActionVersion;
|
exports.getActionVersion = getActionVersion;
|
||||||
exports.getWorkflowEventName = getWorkflowEventName;
|
exports.getWorkflowEventName = getWorkflowEventName;
|
||||||
exports.isRunningLocalAction = isRunningLocalAction;
|
exports.isRunningLocalAction = isRunningLocalAction;
|
||||||
exports.getRelativeScriptPath = getRelativeScriptPath;
|
exports.getRelativeScriptPath = getRelativeScriptPath;
|
||||||
exports.getWorkflowEvent = getWorkflowEvent;
|
exports.getWorkflowEvent = getWorkflowEvent;
|
||||||
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
|
||||||
exports.printDebugLogs = printDebugLogs;
|
exports.printDebugLogs = printDebugLogs;
|
||||||
exports.getUploadValue = getUploadValue;
|
exports.getUploadValue = getUploadValue;
|
||||||
exports.getWorkflowRunID = getWorkflowRunID;
|
exports.getWorkflowRunID = getWorkflowRunID;
|
||||||
@@ -90,319 +88,6 @@ function getTemporaryDirectory() {
|
|||||||
? value
|
? value
|
||||||
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
: (0, util_1.getRequiredEnvParam)("RUNNER_TEMP");
|
||||||
}
|
}
|
||||||
async function runGitCommand(checkoutPath, args, customErrorMessage) {
|
|
||||||
let stdout = "";
|
|
||||||
let stderr = "";
|
|
||||||
core.debug(`Running git command: git ${args.join(" ")}`);
|
|
||||||
try {
|
|
||||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
|
|
||||||
silent: true,
|
|
||||||
listeners: {
|
|
||||||
stdout: (data) => {
|
|
||||||
stdout += data.toString();
|
|
||||||
},
|
|
||||||
stderr: (data) => {
|
|
||||||
stderr += data.toString();
|
|
||||||
},
|
|
||||||
},
|
|
||||||
cwd: checkoutPath,
|
|
||||||
}).exec();
|
|
||||||
return stdout;
|
|
||||||
}
|
|
||||||
catch (error) {
|
|
||||||
let reason = stderr;
|
|
||||||
if (stderr.includes("not a git repository")) {
|
|
||||||
reason =
|
|
||||||
"The checkout path provided to the action does not appear to be a git repository.";
|
|
||||||
}
|
|
||||||
core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the SHA of the commit that is currently checked out.
|
|
||||||
*/
|
|
||||||
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
|
||||||
// Try to use git to get the current commit SHA. If that fails then
|
|
||||||
// log but otherwise silently fall back to using the SHA from the environment.
|
|
||||||
// The only time these two values will differ is during analysis of a PR when
|
|
||||||
// the workflow has changed the current commit to the head commit instead of
|
|
||||||
// the merge commit, which must mean that git is available.
|
|
||||||
// Even if this does go wrong, it's not a huge problem for the alerts to
|
|
||||||
// reported on the merge commit.
|
|
||||||
try {
|
|
||||||
const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
|
|
||||||
return stdout.trim();
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
return (0, exports.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.getCommitOid = getCommitOid;
|
|
||||||
/**
|
|
||||||
* If the action was triggered by a pull request, determine the commit sha at
|
|
||||||
* the head of the base branch, using the merge commit that this workflow analyzes.
|
|
||||||
* Returns undefined if run by other triggers or the base branch commit cannot be
|
|
||||||
* determined.
|
|
||||||
*/
|
|
||||||
const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
|
|
||||||
if (getWorkflowEventName() !== "pull_request") {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
|
||||||
const checkoutPath = checkoutPathOverride ?? (0, exports.getOptionalInput)("checkout_path");
|
|
||||||
try {
|
|
||||||
let commitOid = "";
|
|
||||||
let baseOid = "";
|
|
||||||
let headOid = "";
|
|
||||||
const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
|
|
||||||
for (const data of stdout.split("\n")) {
|
|
||||||
if (data.startsWith("commit ") && commitOid === "") {
|
|
||||||
commitOid = data.substring(7);
|
|
||||||
}
|
|
||||||
else if (data.startsWith("parent ")) {
|
|
||||||
if (baseOid === "") {
|
|
||||||
baseOid = data.substring(7);
|
|
||||||
}
|
|
||||||
else if (headOid === "") {
|
|
||||||
headOid = data.substring(7);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
|
||||||
if (commitOid === mergeSha &&
|
|
||||||
headOid.length === 40 &&
|
|
||||||
baseOid.length === 40) {
|
|
||||||
return baseOid;
|
|
||||||
}
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
|
|
||||||
/**
|
|
||||||
* Deepen the git history of HEAD by one level. Errors are logged.
|
|
||||||
*
|
|
||||||
* This function uses the `checkout_path` to determine the repository path and
|
|
||||||
* works only when called from `analyze` or `upload-sarif`.
|
|
||||||
*/
|
|
||||||
const deepenGitHistory = async function () {
|
|
||||||
try {
|
|
||||||
await runGitCommand((0, exports.getOptionalInput)("checkout_path"), [
|
|
||||||
"fetch",
|
|
||||||
"origin",
|
|
||||||
"HEAD",
|
|
||||||
"--no-tags",
|
|
||||||
"--no-recurse-submodules",
|
|
||||||
"--deepen=1",
|
|
||||||
], "Cannot deepen the shallow repository.");
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// Errors are already logged by runGitCommand()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.deepenGitHistory = deepenGitHistory;
|
|
||||||
/**
|
|
||||||
* Fetch the given remote branch. Errors are logged.
|
|
||||||
*
|
|
||||||
* This function uses the `checkout_path` to determine the repository path and
|
|
||||||
* works only when called from `analyze` or `upload-sarif`.
|
|
||||||
*/
|
|
||||||
const gitFetch = async function (branch, extraFlags) {
|
|
||||||
try {
|
|
||||||
await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// Errors are already logged by runGitCommand()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.gitFetch = gitFetch;
|
|
||||||
/**
|
|
||||||
* Repack the git repository, using with the given flags. Errors are logged.
|
|
||||||
*
|
|
||||||
* This function uses the `checkout_path` to determine the repository path and
|
|
||||||
* works only when called from `analyze` or `upload-sarif`.
|
|
||||||
*/
|
|
||||||
const gitRepack = async function (flags) {
|
|
||||||
try {
|
|
||||||
await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// Errors are already logged by runGitCommand()
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.gitRepack = gitRepack;
|
|
||||||
/**
|
|
||||||
* Compute the all merge bases between the given refs. Returns an empty array
|
|
||||||
* if no merge base is found, or if there is an error.
|
|
||||||
*
|
|
||||||
* This function uses the `checkout_path` to determine the repository path and
|
|
||||||
* works only when called from `analyze` or `upload-sarif`.
|
|
||||||
*/
|
|
||||||
const getAllGitMergeBases = async function (refs) {
|
|
||||||
try {
|
|
||||||
const stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
|
|
||||||
return stdout.trim().split("\n");
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
};
|
|
||||||
exports.getAllGitMergeBases = getAllGitMergeBases;
|
|
||||||
/**
|
|
||||||
* Compute the diff hunk headers between the two given refs.
|
|
||||||
*
|
|
||||||
* This function uses the `checkout_path` to determine the repository path and
|
|
||||||
* works only when called from `analyze` or `upload-sarif`.
|
|
||||||
*
|
|
||||||
* @returns an array of diff hunk headers (one element per line), or undefined
|
|
||||||
* if the action was not triggered by a pull request, or if the diff could not
|
|
||||||
* be determined.
|
|
||||||
*/
|
|
||||||
const getGitDiffHunkHeaders = async function (fromRef, toRef) {
|
|
||||||
let stdout = "";
|
|
||||||
try {
|
|
||||||
stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), [
|
|
||||||
"-c",
|
|
||||||
"core.quotePath=false",
|
|
||||||
"diff",
|
|
||||||
"--no-renames",
|
|
||||||
"--irreversible-delete",
|
|
||||||
"-U0",
|
|
||||||
fromRef,
|
|
||||||
toRef,
|
|
||||||
], `Cannot get diff from ${fromRef} to ${toRef}.`);
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
const headers = [];
|
|
||||||
for (const line of stdout.split("\n")) {
|
|
||||||
if (line.startsWith("--- ") ||
|
|
||||||
line.startsWith("+++ ") ||
|
|
||||||
line.startsWith("@@ ")) {
|
|
||||||
headers.push(line);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return headers;
|
|
||||||
};
|
|
||||||
exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
|
|
||||||
/**
|
|
||||||
* Decode, if necessary, a file path produced by Git. See
|
|
||||||
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
|
|
||||||
* for details on how Git encodes file paths with special characters.
|
|
||||||
*
|
|
||||||
* This function works only for Git output with `core.quotePath=false`.
|
|
||||||
*/
|
|
||||||
const decodeGitFilePath = function (filePath) {
|
|
||||||
if (filePath.startsWith('"') && filePath.endsWith('"')) {
|
|
||||||
filePath = filePath.substring(1, filePath.length - 1);
|
|
||||||
return filePath.replace(/\\([abfnrtv\\"]|[0-7]{1,3})/g, (_match, seq) => {
|
|
||||||
switch (seq[0]) {
|
|
||||||
case "a":
|
|
||||||
return "\x07";
|
|
||||||
case "b":
|
|
||||||
return "\b";
|
|
||||||
case "f":
|
|
||||||
return "\f";
|
|
||||||
case "n":
|
|
||||||
return "\n";
|
|
||||||
case "r":
|
|
||||||
return "\r";
|
|
||||||
case "t":
|
|
||||||
return "\t";
|
|
||||||
case "v":
|
|
||||||
return "\v";
|
|
||||||
case "\\":
|
|
||||||
return "\\";
|
|
||||||
case '"':
|
|
||||||
return '"';
|
|
||||||
default:
|
|
||||||
// Both String.fromCharCode() and String.fromCodePoint() works only
|
|
||||||
// for constructing an entire character at once. If a Unicode
|
|
||||||
// character is encoded as a sequence of escaped bytes, calling these
|
|
||||||
// methods sequentially on the individual byte values would *not*
|
|
||||||
// produce the original multi-byte Unicode character. As a result,
|
|
||||||
// this implementation works only with the Git option core.quotePath
|
|
||||||
// set to false.
|
|
||||||
return String.fromCharCode(parseInt(seq, 8));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return filePath;
|
|
||||||
};
|
|
||||||
exports.decodeGitFilePath = decodeGitFilePath;
|
|
||||||
/**
|
|
||||||
* Get the ref currently being analyzed.
|
|
||||||
*/
|
|
||||||
async function getRef() {
|
|
||||||
// Will be in the form "refs/heads/master" on a push event
|
|
||||||
// or in the form "refs/pull/N/merge" on a pull_request event
|
|
||||||
const refInput = (0, exports.getOptionalInput)("ref");
|
|
||||||
const shaInput = (0, exports.getOptionalInput)("sha");
|
|
||||||
const checkoutPath = (0, exports.getOptionalInput)("checkout_path") ||
|
|
||||||
(0, exports.getOptionalInput)("source-root") ||
|
|
||||||
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
|
||||||
const hasRefInput = !!refInput;
|
|
||||||
const hasShaInput = !!shaInput;
|
|
||||||
// If one of 'ref' or 'sha' are provided, both are required
|
|
||||||
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
|
||||||
throw new util_1.ConfigurationError("Both 'ref' and 'sha' are required if one of them is provided.");
|
|
||||||
}
|
|
||||||
const ref = refInput || getRefFromEnv();
|
|
||||||
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
|
||||||
// If the ref is a user-provided input, we have to skip logic
|
|
||||||
// and assume that it is really where they want to upload the results.
|
|
||||||
if (refInput) {
|
|
||||||
return refInput;
|
|
||||||
}
|
|
||||||
// For pull request refs we want to detect whether the workflow
|
|
||||||
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
|
||||||
// than the 'merge' ref. If so, we want to convert the ref that
|
|
||||||
// we report back.
|
|
||||||
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
|
||||||
if (!pull_ref_regex.test(ref)) {
|
|
||||||
return ref;
|
|
||||||
}
|
|
||||||
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
|
||||||
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
|
||||||
// in actions/checkout@v1 this may not be true as it checks out the repository
|
|
||||||
// using GITHUB_REF. There is a subtle race condition where
|
|
||||||
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
|
||||||
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
|
|
||||||
const hasChangedRef = sha !== head &&
|
|
||||||
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
|
||||||
if (hasChangedRef) {
|
|
||||||
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
|
||||||
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
|
||||||
return newRef;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return ref;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function getRefFromEnv() {
|
|
||||||
// To workaround a limitation of Actions dynamic workflows not setting
|
|
||||||
// the GITHUB_REF in some cases, we accept also the ref within the
|
|
||||||
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
|
|
||||||
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
|
|
||||||
let refEnv;
|
|
||||||
try {
|
|
||||||
refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
// If the GITHUB_REF is not set, we try to rescue by getting the
|
|
||||||
// CODE_SCANNING_REF.
|
|
||||||
const maybeRef = process.env["CODE_SCANNING_REF"];
|
|
||||||
if (maybeRef === undefined || maybeRef.length === 0) {
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
refEnv = maybeRef;
|
|
||||||
}
|
|
||||||
return refEnv;
|
|
||||||
}
|
|
||||||
function getActionVersion() {
|
function getActionVersion() {
|
||||||
return pkg.version;
|
return pkg.version;
|
||||||
}
|
}
|
||||||
@@ -442,30 +127,6 @@ function getWorkflowEvent() {
|
|||||||
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
|
throw new Error(`Unable to read workflow event JSON from ${eventJsonFile}: ${e}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
function removeRefsHeadsPrefix(ref) {
|
|
||||||
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns whether we are analyzing the default branch for the repository.
|
|
||||||
*
|
|
||||||
* This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
|
|
||||||
* environment variable can be set in cases where repository information might not be available, for
|
|
||||||
* example dynamic workflows.
|
|
||||||
*/
|
|
||||||
async function isAnalyzingDefaultBranch() {
|
|
||||||
if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
// Get the current ref and trim and refs/heads/ prefix
|
|
||||||
let currentRef = await getRef();
|
|
||||||
currentRef = removeRefsHeadsPrefix(currentRef);
|
|
||||||
const event = getWorkflowEvent();
|
|
||||||
let defaultBranch = event?.repository?.default_branch;
|
|
||||||
if (getWorkflowEventName() === "schedule") {
|
|
||||||
defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
|
|
||||||
}
|
|
||||||
return currentRef === defaultBranch;
|
|
||||||
}
|
|
||||||
async function printDebugLogs(config) {
|
async function printDebugLogs(config) {
|
||||||
for (const language of config.languages) {
|
for (const language of config.languages) {
|
||||||
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
257
lib/actions-util.test.js
generated
257
lib/actions-util.test.js
generated
@@ -1,169 +1,14 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
||||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
||||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
||||||
}
|
|
||||||
Object.defineProperty(o, k2, desc);
|
|
||||||
}) : (function(o, m, k, k2) {
|
|
||||||
if (k2 === undefined) k2 = k;
|
|
||||||
o[k2] = m[k];
|
|
||||||
}));
|
|
||||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
||||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
||||||
}) : function(o, v) {
|
|
||||||
o["default"] = v;
|
|
||||||
});
|
|
||||||
var __importStar = (this && this.__importStar) || (function () {
|
|
||||||
var ownKeys = function(o) {
|
|
||||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
||||||
var ar = [];
|
|
||||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
||||||
return ar;
|
|
||||||
};
|
|
||||||
return ownKeys(o);
|
|
||||||
};
|
|
||||||
return function (mod) {
|
|
||||||
if (mod && mod.__esModule) return mod;
|
|
||||||
var result = {};
|
|
||||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
||||||
__setModuleDefault(result, mod);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
})();
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = __importStar(require("fs"));
|
|
||||||
const path = __importStar(require("path"));
|
|
||||||
const core = __importStar(require("@actions/core"));
|
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon = __importStar(require("sinon"));
|
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
|
||||||
const api_client_1 = require("./api-client");
|
const api_client_1 = require("./api-client");
|
||||||
const environment_1 = require("./environment");
|
const environment_1 = require("./environment");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
|
||||||
process.env["GITHUB_REF"] = "";
|
|
||||||
await t.throwsAsync(actionsUtil.getRef);
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const expectedRef = "refs/pull/1/merge";
|
|
||||||
const currentSha = "a".repeat(40);
|
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
|
||||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
|
||||||
callback.withArgs("HEAD").resolves(currentSha);
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const expectedRef = "refs/pull/1/merge";
|
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
|
||||||
const sha = "a".repeat(40);
|
|
||||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
|
||||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
|
||||||
callback.withArgs("HEAD").resolves(sha);
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
|
||||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
|
||||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
|
||||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
|
||||||
callback.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
|
||||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
|
|
||||||
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
|
|
||||||
// These values are be ignored
|
|
||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
|
||||||
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
|
||||||
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
|
|
||||||
callback.withArgs("HEAD").resolves("b".repeat(40));
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, "refs/pull/2/merge");
|
|
||||||
callback.restore();
|
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const expectedRef = "refs/pull/1/HEAD";
|
|
||||||
const currentSha = "a".repeat(40);
|
|
||||||
process.env["CODE_SCANNING_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_REF"] = "";
|
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const expectedRef = "refs/pull/1/merge";
|
|
||||||
const currentSha = "a".repeat(40);
|
|
||||||
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
|
|
||||||
process.env["GITHUB_REF"] = expectedRef;
|
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
|
||||||
const actualRef = await actionsUtil.getRef();
|
|
||||||
t.deepEqual(actualRef, expectedRef);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
|
||||||
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
|
|
||||||
await t.throwsAsync(async () => {
|
|
||||||
await actionsUtil.getRef();
|
|
||||||
}, {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
|
||||||
});
|
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
process.env["GITHUB_WORKSPACE"] = "/tmp";
|
|
||||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
|
||||||
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
|
|
||||||
await t.throwsAsync(async () => {
|
|
||||||
await actionsUtil.getRef();
|
|
||||||
}, {
|
|
||||||
instanceOf: Error,
|
|
||||||
message: "Both 'ref' and 'sha' are required if one of them is provided.",
|
|
||||||
});
|
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
(0, ava_1.default)("computeAutomationID()", async (t) => {
|
||||||
let actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
let actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
|
||||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
|
||||||
@@ -184,106 +29,4 @@ const util_1 = require("./util");
|
|||||||
(0, util_1.initializeEnvironment)("1.2.3");
|
(0, util_1.initializeEnvironment)("1.2.3");
|
||||||
t.deepEqual(process.env[environment_1.EnvVar.VERSION], "1.2.3");
|
t.deepEqual(process.env[environment_1.EnvVar.VERSION], "1.2.3");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "push";
|
|
||||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
|
||||||
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
|
||||||
const envFile = path.join(tmpDir, "event.json");
|
|
||||||
fs.writeFileSync(envFile, JSON.stringify({
|
|
||||||
repository: {
|
|
||||||
default_branch: "main",
|
|
||||||
},
|
|
||||||
}));
|
|
||||||
process.env["GITHUB_EVENT_PATH"] = envFile;
|
|
||||||
process.env["GITHUB_REF"] = "main";
|
|
||||||
process.env["GITHUB_SHA"] = "1234";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
|
||||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
|
||||||
process.env["GITHUB_REF"] = "feature";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
|
|
||||||
fs.writeFileSync(envFile, JSON.stringify({
|
|
||||||
schedule: "0 0 * * *",
|
|
||||||
}));
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
|
||||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
|
|
||||||
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
|
||||||
getAdditionalInputStub
|
|
||||||
.withArgs("ref")
|
|
||||||
.resolves("refs/heads/something-else");
|
|
||||||
getAdditionalInputStub
|
|
||||||
.withArgs("sha")
|
|
||||||
.resolves("0000000000000000000000000000000000000000");
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "schedule";
|
|
||||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
|
||||||
t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
|
|
||||||
getAdditionalInputStub.restore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
|
|
||||||
const infoStub = sinon.stub(core, "info");
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "hucairz";
|
|
||||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
|
||||||
const result = await actionsUtil.determineBaseBranchHeadCommitOid(__dirname);
|
|
||||||
t.deepEqual(result, undefined);
|
|
||||||
t.deepEqual(0, infoStub.callCount);
|
|
||||||
infoStub.restore();
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("determineBaseBranchHeadCommitOid not git repository", async (t) => {
|
|
||||||
const infoStub = sinon.stub(core, "info");
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
|
||||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
|
||||||
await actionsUtil.determineBaseBranchHeadCommitOid(tmpDir);
|
|
||||||
});
|
|
||||||
t.deepEqual(1, infoStub.callCount);
|
|
||||||
t.deepEqual(infoStub.firstCall.args[0], "git call failed. Will calculate the base branch SHA on the server. Error: " +
|
|
||||||
"The checkout path provided to the action does not appear to be a git repository.");
|
|
||||||
infoStub.restore();
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("determineBaseBranchHeadCommitOid other error", async (t) => {
|
|
||||||
const infoStub = sinon.stub(core, "info");
|
|
||||||
process.env["GITHUB_EVENT_NAME"] = "pull_request";
|
|
||||||
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
|
|
||||||
const result = await actionsUtil.determineBaseBranchHeadCommitOid(path.join(__dirname, "../../i-dont-exist"));
|
|
||||||
t.deepEqual(result, undefined);
|
|
||||||
t.deepEqual(1, infoStub.callCount);
|
|
||||||
t.assert(infoStub.firstCall.args[0].startsWith("git call failed. Will calculate the base branch SHA on the server. Error: "));
|
|
||||||
t.assert(!infoStub.firstCall.args[0].endsWith("The checkout path provided to the action does not appear to be a git repository."));
|
|
||||||
infoStub.restore();
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("decodeGitFilePath unquoted strings", async (t) => {
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo"), "foo");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo bar"), "foo bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\abar"), "foo\\abar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"), "\\a\\b\\f\\n\\r\\t\\v");
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("decodeGitFilePath quoted strings", async (t) => {
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo"'), "foo");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo bar"'), "foo bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
|
|
||||||
t.deepEqual(actionsUtil.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=actions-util.test.js.map
|
//# sourceMappingURL=actions-util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
3
lib/analyze-action-env.test.js
generated
3
lib/analyze-action-env.test.js
generated
@@ -42,6 +42,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
|||||||
const analyze = __importStar(require("./analyze"));
|
const analyze = __importStar(require("./analyze"));
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const configUtils = __importStar(require("./config-utils"));
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const statusReport = __importStar(require("./status-report"));
|
const statusReport = __importStar(require("./status-report"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
@@ -61,7 +62,7 @@ const util = __importStar(require("./util"));
|
|||||||
.stub(statusReport, "createStatusReportBase")
|
.stub(statusReport, "createStatusReportBase")
|
||||||
.resolves({});
|
.resolves({});
|
||||||
sinon.stub(statusReport, "sendStatusReport").resolves();
|
sinon.stub(statusReport, "sendStatusReport").resolves();
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const gitHubVersion = {
|
const gitHubVersion = {
|
||||||
type: util.GitHubVariant.DOTCOM,
|
type: util.GitHubVariant.DOTCOM,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,C
AAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEhE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAA
E,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
3
lib/analyze-action-input.test.js
generated
3
lib/analyze-action-input.test.js
generated
@@ -42,6 +42,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
|||||||
const analyze = __importStar(require("./analyze"));
|
const analyze = __importStar(require("./analyze"));
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const configUtils = __importStar(require("./config-utils"));
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const statusReport = __importStar(require("./status-report"));
|
const statusReport = __importStar(require("./status-report"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
@@ -77,7 +78,7 @@ const util = __importStar(require("./util"));
|
|||||||
optionalInputStub.withArgs("cleanup-level").returns("none");
|
optionalInputStub.withArgs("cleanup-level").returns("none");
|
||||||
optionalInputStub.withArgs("expect-error").returns("false");
|
optionalInputStub.withArgs("expect-error").returns("false");
|
||||||
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
|
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||||
process.env["CODEQL_THREADS"] = "1";
|
process.env["CODEQL_THREADS"] = "1";
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAA
e,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,sDAAwC;AACxC,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAChE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;Q
AElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,iEAAiE;QACjE,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||||
19
lib/analyze.js
generated
19
lib/analyze.js
generated
@@ -56,6 +56,7 @@ const codeql_1 = require("./codeql");
|
|||||||
const diagnostics_1 = require("./diagnostics");
|
const diagnostics_1 = require("./diagnostics");
|
||||||
const environment_1 = require("./environment");
|
const environment_1 = require("./environment");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const tools_features_1 = require("./tools-features");
|
const tools_features_1 = require("./tools-features");
|
||||||
@@ -190,39 +191,39 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
|
|||||||
// Step 1: Deepen from the PR merge commit to the base branch head and the PR
|
// Step 1: Deepen from the PR merge commit to the base branch head and the PR
|
||||||
// topic branch head, so that the PR merge commit is no longer considered a
|
// topic branch head, so that the PR merge commit is no longer considered a
|
||||||
// grafted commit.
|
// grafted commit.
|
||||||
await actionsUtil.deepenGitHistory();
|
await gitUtils.deepenGitHistory();
|
||||||
// Step 2: Fetch the base branch shallow history. This step ensures that the
|
// Step 2: Fetch the base branch shallow history. This step ensures that the
|
||||||
// base branch name is present in the local repository. Normally the base
|
// base branch name is present in the local repository. Normally the base
|
||||||
// branch name would be added by Step 4. However, if the base branch head is
|
// branch name would be added by Step 4. However, if the base branch head is
|
||||||
// an ancestor of the PR topic branch head, Step 4 would fail without doing
|
// an ancestor of the PR topic branch head, Step 4 would fail without doing
|
||||||
// anything, so we need to fetch the base branch explicitly.
|
// anything, so we need to fetch the base branch explicitly.
|
||||||
await actionsUtil.gitFetch(baseRef, ["--depth=1"]);
|
await gitUtils.gitFetch(baseRef, ["--depth=1"]);
|
||||||
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
|
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
|
||||||
// that are reachable from the base branch head.
|
// that are reachable from the base branch head.
|
||||||
await actionsUtil.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
|
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
|
||||||
// Step 4: Fetch the base branch history, stopping when we reach commits that
|
// Step 4: Fetch the base branch history, stopping when we reach commits that
|
||||||
// are reachable from the PR topic branch head.
|
// are reachable from the PR topic branch head.
|
||||||
await actionsUtil.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
|
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
|
||||||
// Step 5: Repack the history to remove the shallow grafts that were added by
|
// Step 5: Repack the history to remove the shallow grafts that were added by
|
||||||
// the previous fetches. This step works around a bug that causes subsequent
|
// the previous fetches. This step works around a bug that causes subsequent
|
||||||
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
|
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
|
||||||
// See https://stackoverflow.com/q/63878612
|
// See https://stackoverflow.com/q/63878612
|
||||||
await actionsUtil.gitRepack(["-d"]);
|
await gitUtils.gitRepack(["-d"]);
|
||||||
// Step 6: Deepen the history so that we have the merge bases between the base
|
// Step 6: Deepen the history so that we have the merge bases between the base
|
||||||
// branch and the PR topic branch.
|
// branch and the PR topic branch.
|
||||||
await actionsUtil.deepenGitHistory();
|
await gitUtils.deepenGitHistory();
|
||||||
// To compute the exact same diff as GitHub would compute for the PR, we need
|
// To compute the exact same diff as GitHub would compute for the PR, we need
|
||||||
// to use the same merge base as GitHub. That is easy to do if there is only
|
// to use the same merge base as GitHub. That is easy to do if there is only
|
||||||
// one merge base, which is by far the most common case. If there are multiple
|
// one merge base, which is by far the most common case. If there are multiple
|
||||||
// merge bases, we stop without producing a diff range.
|
// merge bases, we stop without producing a diff range.
|
||||||
const mergeBases = await actionsUtil.getAllGitMergeBases([baseRef, headRef]);
|
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
|
||||||
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
|
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
|
||||||
if (mergeBases.length !== 1) {
|
if (mergeBases.length !== 1) {
|
||||||
logger.info("Cannot compute diff range because baseRef and headRef " +
|
logger.info("Cannot compute diff range because baseRef and headRef " +
|
||||||
`have ${mergeBases.length} merge bases (instead of exactly 1).`);
|
`have ${mergeBases.length} merge bases (instead of exactly 1).`);
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
const diffHunkHeaders = await actionsUtil.getGitDiffHunkHeaders(mergeBases[0], headRef);
|
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(mergeBases[0], headRef);
|
||||||
if (diffHunkHeaders === undefined) {
|
if (diffHunkHeaders === undefined) {
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
@@ -230,7 +231,7 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
|
|||||||
let changedFile = "";
|
let changedFile = "";
|
||||||
for (const line of diffHunkHeaders) {
|
for (const line of diffHunkHeaders) {
|
||||||
if (line.startsWith("+++ ")) {
|
if (line.startsWith("+++ ")) {
|
||||||
const filePath = actionsUtil.decodeGitFilePath(line.substring(4));
|
const filePath = gitUtils.decodeGitFilePath(line.substring(4));
|
||||||
if (filePath.startsWith("b/")) {
|
if (filePath.startsWith("b/")) {
|
||||||
// The file was edited: track all hunks in the file
|
// The file was edited: track all hunks in the file
|
||||||
changedFile = filePath.substring(2);
|
changedFile = filePath.substring(2);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/codeql.js
generated
3
lib/codeql.js
generated
@@ -55,6 +55,7 @@ const cli_errors_1 = require("./cli-errors");
|
|||||||
const doc_url_1 = require("./doc-url");
|
const doc_url_1 = require("./doc-url");
|
||||||
const environment_1 = require("./environment");
|
const environment_1 = require("./environment");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
|
const git_utils_1 = require("./git-utils");
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const setupCodeql = __importStar(require("./setup-codeql"));
|
const setupCodeql = __importStar(require("./setup-codeql"));
|
||||||
const tools_features_1 = require("./tools-features");
|
const tools_features_1 = require("./tools-features");
|
||||||
@@ -830,7 +831,7 @@ async function getTrapCachingExtractorConfigArgsForLang(config, language) {
|
|||||||
const cacheDir = config.trapCaches[language];
|
const cacheDir = config.trapCaches[language];
|
||||||
if (cacheDir === undefined)
|
if (cacheDir === undefined)
|
||||||
return [];
|
return [];
|
||||||
const write = await (0, actions_util_1.isAnalyzingDefaultBranch)();
|
const write = await (0, git_utils_1.isAnalyzingDefaultBranch)();
|
||||||
return [
|
return [
|
||||||
`-O=${language}.trap.cache.dir=${cacheDir}`,
|
`-O=${language}.trap.cache.dir=${cacheDir}`,
|
||||||
`-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
|
`-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
5
lib/database-upload.js
generated
5
lib/database-upload.js
generated
@@ -38,6 +38,7 @@ const fs = __importStar(require("fs"));
|
|||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
const api_client_1 = require("./api-client");
|
const api_client_1 = require("./api-client");
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
||||||
@@ -55,7 +56,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
|||||||
logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
|
logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
|
if (!(await gitUtils.isAnalyzingDefaultBranch())) {
|
||||||
// We only want to upload a database if we are analyzing the default branch.
|
// We only want to upload a database if we are analyzing the default branch.
|
||||||
logger.debug("Not analyzing default branch. Skipping upload.");
|
logger.debug("Not analyzing default branch. Skipping upload.");
|
||||||
return;
|
return;
|
||||||
@@ -79,7 +80,7 @@ async function uploadDatabases(repositoryNwo, config, apiDetails, logger) {
|
|||||||
const bundledDb = await (0, util_1.bundleDb)(config, language, codeql, language);
|
const bundledDb = await (0, util_1.bundleDb)(config, language, codeql, language);
|
||||||
const bundledDbSize = fs.statSync(bundledDb).size;
|
const bundledDbSize = fs.statSync(bundledDb).size;
|
||||||
const bundledDbReadStream = fs.createReadStream(bundledDb);
|
const bundledDbReadStream = fs.createReadStream(bundledDb);
|
||||||
const commitOid = await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path"));
|
const commitOid = await gitUtils.getCommitOid(actionsUtil.getRequiredInput("checkout_path"));
|
||||||
try {
|
try {
|
||||||
await client.request(`POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, {
|
await client.request(`POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, {
|
||||||
baseUrl: uploadsBaseUrl,
|
baseUrl: uploadsBaseUrl,
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAWA,0CAmFC;AA9FD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAIrC,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,WAAW,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACpD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,WAAW,CAAC,YAAY,CAC9C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,mBAAmB;oBACzB
,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
|
{"version":3,"file":"database-upload.js","sourceRoot":"","sources":["../src/database-upload.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAYA,0CAmFC;AA/FD,uCAAyB;AAEzB,4DAA8C;AAC9C,6CAA8D;AAC9D,qCAAqC;AAErC,sDAAwC;AAGxC,6CAA+B;AAC/B,iCAAkD;AAE3C,KAAK,UAAU,eAAe,CACnC,aAA4B,EAC5B,MAAc,EACd,UAA4B,EAC5B,MAAc;IAEd,IAAI,WAAW,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,KAAK,MAAM,EAAE,CAAC;QAC/D,MAAM,CAAC,KAAK,CAAC,wDAAwD,CAAC,CAAC;QACvE,OAAO;IACT,CAAC;IAED,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE,CAAC;QACxB,MAAM,CAAC,KAAK,CAAC,yCAAyC,CAAC,CAAC;QACxD,OAAO;IACT,CAAC;IAED,iDAAiD;IACjD,IACE,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM;QACvD,MAAM,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,UAAU,EAC3D,CAAC;QACD,MAAM,CAAC,KAAK,CAAC,6DAA6D,CAAC,CAAC;QAC5E,OAAO;IACT,CAAC;IAED,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,wBAAwB,EAAE,CAAC,EAAE,CAAC;QACjD,4EAA4E;QAC5E,MAAM,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC/D,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAC9B,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAEjD,MAAM,UAAU,GAAG,IAAI,GAAG,CAAC,IAAA,qBAAc,EAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC;IAC3D,UAAU,CAAC,QAAQ,GAAG,WAAW,UAAU,CAAC,QAAQ,EAAE,CAAC;IAEvD,4DAA4D;IAC5D,0CAA0C;IAC1C,IAAI,cAAc,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC3C,IAAI,cAAc,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC;QACjC,cAAc,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;IAC/C,CAAC;IAED,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE,CAAC;QACxC,IAAI,CAAC;YACH,8BAA8B;YAC9B,2EAA2E;YAC3E,8EAA8E;YAC9E,wEAAwE;YACxE,MAAM,SAAS,GAAG,MAAM,IAAA,eAAQ,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;YACrE,MAAM,aAAa,GAAG,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC;YAClD,MAAM,mBAAmB,GAAG,EAAE,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAC;YAC3D,MAAM,SAAS,GAAG,MAAM,QAAQ,CAAC,YAAY,CAC3C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAC9C,CAAC;YACF,IAAI,CAAC;gBACH,MAAM,MAAM,CAAC,OAAO,CAClB,qGAAqG,EACrG;oBACE,OAAO,EAAE,cAAc;oBACvB,KAAK,EAAE,aAAa,CAAC,KAAK;oBAC1B,IAAI,EAAE,aAAa,CAAC,IAAI;oBACxB,QAAQ;oBACR,IAAI,EAAE,GAAG,QAAQ,WAAW;oBAC5B,UAAU,EAAE,SAAS;oBACrB,IAAI,EAAE,
mBAAmB;oBACzB,OAAO,EAAE;wBACP,aAAa,EAAE,SAAS,UAAU,CAAC,IAAI,EAAE;wBACzC,cAAc,EAAE,iBAAiB;wBACjC,gBAAgB,EAAE,aAAa;qBAChC;iBACF,CACF,CAAC;gBACF,MAAM,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;YACjE,CAAC;oBAAS,CAAC;gBACT,mBAAmB,CAAC,KAAK,EAAE,CAAC;YAC9B,CAAC;QACH,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACX,4CAA4C;YAC5C,MAAM,CAAC,OAAO,CAAC,iCAAiC,QAAQ,KAAK,CAAC,EAAE,CAAC,CAAC;QACpE,CAAC;IACH,CAAC;AACH,CAAC"}
|
||||||
13
lib/database-upload.test.js
generated
13
lib/database-upload.test.js
generated
@@ -44,6 +44,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
|||||||
const apiClient = __importStar(require("./api-client"));
|
const apiClient = __importStar(require("./api-client"));
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
const database_upload_1 = require("./database-upload");
|
const database_upload_1 = require("./database-upload");
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
@@ -85,7 +86,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("false");
|
.returns("false");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||||
@@ -100,7 +101,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("true");
|
.returns("true");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const config = getTestConfig(tmpDir);
|
const config = getTestConfig(tmpDir);
|
||||||
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
|
config.gitHubVersion = { type: util_1.GitHubVariant.GHES, version: "3.0" };
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
@@ -117,7 +118,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("true");
|
.returns("true");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
await (0, database_upload_1.uploadDatabases)(testRepoName, getTestConfig(tmpDir), testApiDetails, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||||
@@ -131,7 +132,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("true");
|
.returns("true");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
await mockHttpRequests(500);
|
await mockHttpRequests(500);
|
||||||
(0, codeql_1.setCodeQL)({
|
(0, codeql_1.setCodeQL)({
|
||||||
async databaseBundle(_, outputFilePath) {
|
async databaseBundle(_, outputFilePath) {
|
||||||
@@ -152,7 +153,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("true");
|
.returns("true");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
await mockHttpRequests(201);
|
await mockHttpRequests(201);
|
||||||
(0, codeql_1.setCodeQL)({
|
(0, codeql_1.setCodeQL)({
|
||||||
async databaseBundle(_, outputFilePath) {
|
async databaseBundle(_, outputFilePath) {
|
||||||
@@ -172,7 +173,7 @@ async function mockHttpRequests(databaseUploadStatusCode) {
|
|||||||
.stub(actionsUtil, "getRequiredInput")
|
.stub(actionsUtil, "getRequiredInput")
|
||||||
.withArgs("upload-database")
|
.withArgs("upload-database")
|
||||||
.returns("true");
|
.returns("true");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const databaseUploadSpy = await mockHttpRequests(201);
|
const databaseUploadSpy = await mockHttpRequests(201);
|
||||||
(0, codeql_1.setCodeQL)({
|
(0, codeql_1.setCodeQL)({
|
||||||
async databaseBundle(_, outputFilePath) {
|
async databaseBundle(_, outputFilePath) {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
381
lib/git-utils.js
generated
Normal file
381
lib/git-utils.js
generated
Normal file
@@ -0,0 +1,381 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||||
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||||
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||||
|
}
|
||||||
|
Object.defineProperty(o, k2, desc);
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || (function () {
|
||||||
|
var ownKeys = function(o) {
|
||||||
|
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||||
|
var ar = [];
|
||||||
|
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
return ownKeys(o);
|
||||||
|
};
|
||||||
|
return function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = void 0;
|
||||||
|
exports.getRef = getRef;
|
||||||
|
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
|
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||||
|
const actions_util_1 = require("./actions-util");
|
||||||
|
const util_1 = require("./util");
|
||||||
|
async function runGitCommand(checkoutPath, args, customErrorMessage) {
|
||||||
|
let stdout = "";
|
||||||
|
let stderr = "";
|
||||||
|
core.debug(`Running git command: git ${args.join(" ")}`);
|
||||||
|
try {
|
||||||
|
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
|
||||||
|
silent: true,
|
||||||
|
listeners: {
|
||||||
|
stdout: (data) => {
|
||||||
|
stdout += data.toString();
|
||||||
|
},
|
||||||
|
stderr: (data) => {
|
||||||
|
stderr += data.toString();
|
||||||
|
},
|
||||||
|
},
|
||||||
|
cwd: checkoutPath,
|
||||||
|
}).exec();
|
||||||
|
return stdout;
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
let reason = stderr;
|
||||||
|
if (stderr.includes("not a git repository")) {
|
||||||
|
reason =
|
||||||
|
"The checkout path provided to the action does not appear to be a git repository.";
|
||||||
|
}
|
||||||
|
core.info(`git call failed. ${customErrorMessage} Error: ${reason}`);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Gets the SHA of the commit that is currently checked out.
|
||||||
|
*/
|
||||||
|
const getCommitOid = async function (checkoutPath, ref = "HEAD") {
|
||||||
|
// Try to use git to get the current commit SHA. If that fails then
|
||||||
|
// log but otherwise silently fall back to using the SHA from the environment.
|
||||||
|
// The only time these two values will differ is during analysis of a PR when
|
||||||
|
// the workflow has changed the current commit to the head commit instead of
|
||||||
|
// the merge commit, which must mean that git is available.
|
||||||
|
// Even if this does go wrong, it's not a huge problem for the alerts to
|
||||||
|
// reported on the merge commit.
|
||||||
|
try {
|
||||||
|
const stdout = await runGitCommand(checkoutPath, ["rev-parse", ref], "Continuing with commit SHA from user input or environment.");
|
||||||
|
return stdout.trim();
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
return (0, actions_util_1.getOptionalInput)("sha") || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.getCommitOid = getCommitOid;
|
||||||
|
/**
|
||||||
|
* If the action was triggered by a pull request, determine the commit sha at
|
||||||
|
* the head of the base branch, using the merge commit that this workflow analyzes.
|
||||||
|
* Returns undefined if run by other triggers or the base branch commit cannot be
|
||||||
|
* determined.
|
||||||
|
*/
|
||||||
|
const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
|
||||||
|
if ((0, actions_util_1.getWorkflowEventName)() !== "pull_request") {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||||
|
const checkoutPath = checkoutPathOverride ?? (0, actions_util_1.getOptionalInput)("checkout_path");
|
||||||
|
try {
|
||||||
|
let commitOid = "";
|
||||||
|
let baseOid = "";
|
||||||
|
let headOid = "";
|
||||||
|
const stdout = await runGitCommand(checkoutPath, ["show", "-s", "--format=raw", mergeSha], "Will calculate the base branch SHA on the server.");
|
||||||
|
for (const data of stdout.split("\n")) {
|
||||||
|
if (data.startsWith("commit ") && commitOid === "") {
|
||||||
|
commitOid = data.substring(7);
|
||||||
|
}
|
||||||
|
else if (data.startsWith("parent ")) {
|
||||||
|
if (baseOid === "") {
|
||||||
|
baseOid = data.substring(7);
|
||||||
|
}
|
||||||
|
else if (headOid === "") {
|
||||||
|
headOid = data.substring(7);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
|
||||||
|
if (commitOid === mergeSha &&
|
||||||
|
headOid.length === 40 &&
|
||||||
|
baseOid.length === 40) {
|
||||||
|
return baseOid;
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
|
||||||
|
/**
|
||||||
|
* Deepen the git history of HEAD by one level. Errors are logged.
|
||||||
|
*
|
||||||
|
* This function uses the `checkout_path` to determine the repository path and
|
||||||
|
* works only when called from `analyze` or `upload-sarif`.
|
||||||
|
*/
|
||||||
|
const deepenGitHistory = async function () {
|
||||||
|
try {
|
||||||
|
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
|
||||||
|
"fetch",
|
||||||
|
"origin",
|
||||||
|
"HEAD",
|
||||||
|
"--no-tags",
|
||||||
|
"--no-recurse-submodules",
|
||||||
|
"--deepen=1",
|
||||||
|
], "Cannot deepen the shallow repository.");
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
// Errors are already logged by runGitCommand()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.deepenGitHistory = deepenGitHistory;
|
||||||
|
/**
|
||||||
|
* Fetch the given remote branch. Errors are logged.
|
||||||
|
*
|
||||||
|
* This function uses the `checkout_path` to determine the repository path and
|
||||||
|
* works only when called from `analyze` or `upload-sarif`.
|
||||||
|
*/
|
||||||
|
const gitFetch = async function (branch, extraFlags) {
|
||||||
|
try {
|
||||||
|
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
// Errors are already logged by runGitCommand()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.gitFetch = gitFetch;
|
||||||
|
/**
|
||||||
|
* Repack the git repository, using with the given flags. Errors are logged.
|
||||||
|
*
|
||||||
|
* This function uses the `checkout_path` to determine the repository path and
|
||||||
|
* works only when called from `analyze` or `upload-sarif`.
|
||||||
|
*/
|
||||||
|
const gitRepack = async function (flags) {
|
||||||
|
try {
|
||||||
|
await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
// Errors are already logged by runGitCommand()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.gitRepack = gitRepack;
|
||||||
|
/**
|
||||||
|
* Compute the all merge bases between the given refs. Returns an empty array
|
||||||
|
* if no merge base is found, or if there is an error.
|
||||||
|
*
|
||||||
|
* This function uses the `checkout_path` to determine the repository path and
|
||||||
|
* works only when called from `analyze` or `upload-sarif`.
|
||||||
|
*/
|
||||||
|
const getAllGitMergeBases = async function (refs) {
|
||||||
|
try {
|
||||||
|
const stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
|
||||||
|
return stdout.trim().split("\n");
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
};
|
||||||
|
exports.getAllGitMergeBases = getAllGitMergeBases;
|
||||||
|
/**
|
||||||
|
* Compute the diff hunk headers between the two given refs.
|
||||||
|
*
|
||||||
|
* This function uses the `checkout_path` to determine the repository path and
|
||||||
|
* works only when called from `analyze` or `upload-sarif`.
|
||||||
|
*
|
||||||
|
* @returns an array of diff hunk headers (one element per line), or undefined
|
||||||
|
* if the action was not triggered by a pull request, or if the diff could not
|
||||||
|
* be determined.
|
||||||
|
*/
|
||||||
|
const getGitDiffHunkHeaders = async function (fromRef, toRef) {
|
||||||
|
let stdout = "";
|
||||||
|
try {
|
||||||
|
stdout = await runGitCommand((0, actions_util_1.getOptionalInput)("checkout_path"), [
|
||||||
|
"-c",
|
||||||
|
"core.quotePath=false",
|
||||||
|
"diff",
|
||||||
|
"--no-renames",
|
||||||
|
"--irreversible-delete",
|
||||||
|
"-U0",
|
||||||
|
fromRef,
|
||||||
|
toRef,
|
||||||
|
], `Cannot get diff from ${fromRef} to ${toRef}.`);
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const headers = [];
|
||||||
|
for (const line of stdout.split("\n")) {
|
||||||
|
if (line.startsWith("--- ") ||
|
||||||
|
line.startsWith("+++ ") ||
|
||||||
|
line.startsWith("@@ ")) {
|
||||||
|
headers.push(line);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return headers;
|
||||||
|
};
|
||||||
|
exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
|
||||||
|
/**
|
||||||
|
* Decode, if necessary, a file path produced by Git. See
|
||||||
|
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
|
||||||
|
* for details on how Git encodes file paths with special characters.
|
||||||
|
*
|
||||||
|
* This function works only for Git output with `core.quotePath=false`.
|
||||||
|
*/
|
||||||
|
const decodeGitFilePath = function (filePath) {
|
||||||
|
if (filePath.startsWith('"') && filePath.endsWith('"')) {
|
||||||
|
filePath = filePath.substring(1, filePath.length - 1);
|
||||||
|
return filePath.replace(/\\([abfnrtv\\"]|[0-7]{1,3})/g, (_match, seq) => {
|
||||||
|
switch (seq[0]) {
|
||||||
|
case "a":
|
||||||
|
return "\x07";
|
||||||
|
case "b":
|
||||||
|
return "\b";
|
||||||
|
case "f":
|
||||||
|
return "\f";
|
||||||
|
case "n":
|
||||||
|
return "\n";
|
||||||
|
case "r":
|
||||||
|
return "\r";
|
||||||
|
case "t":
|
||||||
|
return "\t";
|
||||||
|
case "v":
|
||||||
|
return "\v";
|
||||||
|
case "\\":
|
||||||
|
return "\\";
|
||||||
|
case '"':
|
||||||
|
return '"';
|
||||||
|
default:
|
||||||
|
// Both String.fromCharCode() and String.fromCodePoint() works only
|
||||||
|
// for constructing an entire character at once. If a Unicode
|
||||||
|
// character is encoded as a sequence of escaped bytes, calling these
|
||||||
|
// methods sequentially on the individual byte values would *not*
|
||||||
|
// produce the original multi-byte Unicode character. As a result,
|
||||||
|
// this implementation works only with the Git option core.quotePath
|
||||||
|
// set to false.
|
||||||
|
return String.fromCharCode(parseInt(seq, 8));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return filePath;
|
||||||
|
};
|
||||||
|
exports.decodeGitFilePath = decodeGitFilePath;
|
||||||
|
function getRefFromEnv() {
|
||||||
|
// To workaround a limitation of Actions dynamic workflows not setting
|
||||||
|
// the GITHUB_REF in some cases, we accept also the ref within the
|
||||||
|
// CODE_SCANNING_REF variable. When possible, however, we prefer to use
|
||||||
|
// the GITHUB_REF as that is a protected variable and cannot be overwritten.
|
||||||
|
let refEnv;
|
||||||
|
try {
|
||||||
|
refEnv = (0, util_1.getRequiredEnvParam)("GITHUB_REF");
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
// If the GITHUB_REF is not set, we try to rescue by getting the
|
||||||
|
// CODE_SCANNING_REF.
|
||||||
|
const maybeRef = process.env["CODE_SCANNING_REF"];
|
||||||
|
if (maybeRef === undefined || maybeRef.length === 0) {
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
refEnv = maybeRef;
|
||||||
|
}
|
||||||
|
return refEnv;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Get the ref currently being analyzed.
|
||||||
|
*/
|
||||||
|
async function getRef() {
|
||||||
|
// Will be in the form "refs/heads/master" on a push event
|
||||||
|
// or in the form "refs/pull/N/merge" on a pull_request event
|
||||||
|
const refInput = (0, actions_util_1.getOptionalInput)("ref");
|
||||||
|
const shaInput = (0, actions_util_1.getOptionalInput)("sha");
|
||||||
|
const checkoutPath = (0, actions_util_1.getOptionalInput)("checkout_path") ||
|
||||||
|
(0, actions_util_1.getOptionalInput)("source-root") ||
|
||||||
|
(0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE");
|
||||||
|
const hasRefInput = !!refInput;
|
||||||
|
const hasShaInput = !!shaInput;
|
||||||
|
// If one of 'ref' or 'sha' are provided, both are required
|
||||||
|
if ((hasRefInput || hasShaInput) && !(hasRefInput && hasShaInput)) {
|
||||||
|
throw new util_1.ConfigurationError("Both 'ref' and 'sha' are required if one of them is provided.");
|
||||||
|
}
|
||||||
|
const ref = refInput || getRefFromEnv();
|
||||||
|
const sha = shaInput || (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
|
||||||
|
// If the ref is a user-provided input, we have to skip logic
|
||||||
|
// and assume that it is really where they want to upload the results.
|
||||||
|
if (refInput) {
|
||||||
|
return refInput;
|
||||||
|
}
|
||||||
|
// For pull request refs we want to detect whether the workflow
|
||||||
|
// has run `git checkout HEAD^2` to analyze the 'head' ref rather
|
||||||
|
// than the 'merge' ref. If so, we want to convert the ref that
|
||||||
|
// we report back.
|
||||||
|
const pull_ref_regex = /refs\/pull\/(\d+)\/merge/;
|
||||||
|
if (!pull_ref_regex.test(ref)) {
|
||||||
|
return ref;
|
||||||
|
}
|
||||||
|
const head = await (0, exports.getCommitOid)(checkoutPath, "HEAD");
|
||||||
|
// in actions/checkout@v2+ we can check if git rev-parse HEAD == GITHUB_SHA
|
||||||
|
// in actions/checkout@v1 this may not be true as it checks out the repository
|
||||||
|
// using GITHUB_REF. There is a subtle race condition where
|
||||||
|
// git rev-parse GITHUB_REF != GITHUB_SHA, so we must check
|
||||||
|
// git rev-parse GITHUB_REF == git rev-parse HEAD instead.
|
||||||
|
const hasChangedRef = sha !== head &&
|
||||||
|
(await (0, exports.getCommitOid)(checkoutPath, ref.replace(/^refs\/pull\//, "refs/remotes/pull/"))) !== head;
|
||||||
|
if (hasChangedRef) {
|
||||||
|
const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head");
|
||||||
|
core.debug(`No longer on merge commit, rewriting ref from ${ref} to ${newRef}.`);
|
||||||
|
return newRef;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return ref;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function removeRefsHeadsPrefix(ref) {
|
||||||
|
return ref.startsWith("refs/heads/") ? ref.slice("refs/heads/".length) : ref;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Returns whether we are analyzing the default branch for the repository.
|
||||||
|
*
|
||||||
|
* This first checks the environment variable `CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH`. This
|
||||||
|
* environment variable can be set in cases where repository information might not be available, for
|
||||||
|
* example dynamic workflows.
|
||||||
|
*/
|
||||||
|
async function isAnalyzingDefaultBranch() {
|
||||||
|
if (process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH === "true") {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
// Get the current ref and trim and refs/heads/ prefix
|
||||||
|
let currentRef = await getRef();
|
||||||
|
currentRef = removeRefsHeadsPrefix(currentRef);
|
||||||
|
const event = (0, actions_util_1.getWorkflowEvent)();
|
||||||
|
let defaultBranch = event?.repository?.default_branch;
|
||||||
|
if ((0, actions_util_1.getWorkflowEventName)() === "schedule") {
|
||||||
|
defaultBranch = removeRefsHeadsPrefix(getRefFromEnv());
|
||||||
|
}
|
||||||
|
return currentRef === defaultBranch;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=git-utils.js.map
|
||||||
1
lib/git-utils.js.map
Normal file
1
lib/git-utils.js.map
Normal file
File diff suppressed because one or more lines are too long
268
lib/git-utils.test.js
generated
Normal file
268
lib/git-utils.test.js
generated
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||||
|
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||||
|
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||||
|
}
|
||||||
|
Object.defineProperty(o, k2, desc);
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || (function () {
|
||||||
|
var ownKeys = function(o) {
|
||||||
|
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||||
|
var ar = [];
|
||||||
|
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
return ownKeys(o);
|
||||||
|
};
|
||||||
|
return function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const sinon = __importStar(require("sinon"));
|
||||||
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const util_1 = require("./util");
|
||||||
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
|
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||||
|
process.env["GITHUB_REF"] = "";
|
||||||
|
await t.throwsAsync(gitUtils.getRef);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||||
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
|
const expectedRef = "refs/pull/1/merge";
|
||||||
|
const currentSha = "a".repeat(40);
|
||||||
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
|
const callback = sinon.stub(gitUtils, "getCommitOid");
|
||||||
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
|
const actualRef = await gitUtils.getRef();
|
||||||
|
t.deepEqual(actualRef, expectedRef);
|
||||||
|
callback.restore();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        const expectedRef = "refs/pull/1/merge";
        process.env.GITHUB_REF = expectedRef;
        process.env.GITHUB_SHA = "b".repeat(40);
        // checkout@v1 behavior: GITHUB_SHA has moved on, but HEAD still
        // matches the remote merge ref, so the merge ref is reported.
        const checkedOutSha = "a".repeat(40);
        const getCommitOidStub = sinon.stub(gitUtils, "getCommitOid");
        getCommitOidStub.withArgs("refs/remotes/pull/1/merge").resolves(checkedOutSha);
        getCommitOidStub.withArgs("HEAD").resolves(checkedOutSha);
        t.deepEqual(await gitUtils.getRef(), expectedRef);
        getCommitOidStub.restore();
    });
});
(0, ava_1.default)("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        process.env.GITHUB_REF = "refs/pull/1/merge";
        process.env.GITHUB_SHA = "a".repeat(40);
        // HEAD no longer matches the merge ref's commit, so getRef()
        // should fall back to the PR head ref instead.
        const getCommitOidStub = sinon.stub(gitUtils, "getCommitOid");
        getCommitOidStub.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
        getCommitOidStub.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
        t.deepEqual(await gitUtils.getRef(), "refs/pull/1/head");
        getCommitOidStub.restore();
    });
});
(0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        // Supply explicit `ref` and `sha` action inputs.
        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
        getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
        getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
        // These environment values are ignored in favor of the inputs above.
        process.env.GITHUB_REF = "refs/pull/1/merge";
        process.env.GITHUB_SHA = "a".repeat(40);
        const getCommitOidStub = sinon.stub(gitUtils, "getCommitOid");
        getCommitOidStub.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
        getCommitOidStub.withArgs("HEAD").resolves("b".repeat(40));
        t.deepEqual(await gitUtils.getRef(), "refs/pull/2/merge");
        getCommitOidStub.restore();
        getAdditionalInputStub.restore();
    });
});
(0, ava_1.default)("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        const expectedRef = "refs/pull/1/HEAD";
        // GITHUB_REF is empty, so getRef() should fall back to
        // CODE_SCANNING_REF.
        process.env.CODE_SCANNING_REF = expectedRef;
        process.env.GITHUB_REF = "";
        process.env.GITHUB_SHA = "a".repeat(40);
        t.deepEqual(await gitUtils.getRef(), expectedRef);
    });
});
(0, ava_1.default)("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        const expectedRef = "refs/pull/1/merge";
        // When both variables are set, GITHUB_REF wins.
        process.env.CODE_SCANNING_REF = "refs/pull/1/HEAD";
        process.env.GITHUB_REF = expectedRef;
        process.env.GITHUB_SHA = "a".repeat(40);
        t.deepEqual(await gitUtils.getRef(), expectedRef);
    });
});
(0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
        optionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
        // A `ref` input without a matching `sha` is an invalid combination.
        await t.throwsAsync(async () => {
            await gitUtils.getRef();
        }, {
            instanceOf: Error,
            message: "Both 'ref' and 'sha' are required if one of them is provided.",
        });
        optionalInputStub.restore();
    });
});
(0, ava_1.default)("getRef() throws an error if only `sha` is provided as an input", async (t) => {
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        process.env.GITHUB_WORKSPACE = "/tmp";
        const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
        optionalInputStub.withArgs("sha").resolves("a".repeat(40));
        // A `sha` input without a matching `ref` is an invalid combination.
        await t.throwsAsync(async () => {
            await gitUtils.getRef();
        }, {
            instanceOf: Error,
            message: "Both 'ref' and 'sha' are required if one of them is provided.",
        });
        optionalInputStub.restore();
    });
});
(0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
    // The CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH override takes
    // precedence over everything else.
    process.env.GITHUB_EVENT_NAME = "push";
    process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH = "true";
    t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
    process.env.CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH = "false";
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
        const envFile = path.join(tmpDir, "event.json");
        const pushPayload = {
            repository: {
                default_branch: "main",
            },
        };
        fs.writeFileSync(envFile, JSON.stringify(pushPayload));
        process.env.GITHUB_EVENT_PATH = envFile;
        // A push to the default branch, named bare or fully qualified.
        process.env.GITHUB_REF = "main";
        process.env.GITHUB_SHA = "1234";
        t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
        process.env.GITHUB_REF = "refs/heads/main";
        t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
        // A push to a non-default branch.
        process.env.GITHUB_REF = "feature";
        t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
        // A scheduled run on the default branch.
        const schedulePayload = {
            schedule: "0 0 * * *",
        };
        fs.writeFileSync(envFile, JSON.stringify(schedulePayload));
        process.env.GITHUB_EVENT_NAME = "schedule";
        process.env.GITHUB_REF = "refs/heads/main";
        t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
        // Explicit ref/sha inputs pointing at another branch override the
        // environment, so this is no longer the default branch.
        const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
        getAdditionalInputStub
            .withArgs("ref")
            .resolves("refs/heads/something-else");
        getAdditionalInputStub
            .withArgs("sha")
            .resolves("0000000000000000000000000000000000000000");
        process.env.GITHUB_EVENT_NAME = "schedule";
        process.env.GITHUB_REF = "refs/heads/main";
        t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), false);
        getAdditionalInputStub.restore();
    });
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
    const coreInfoStub = sinon.stub(core, "info");
    process.env.GITHUB_EVENT_NAME = "hucairz";
    process.env.GITHUB_SHA = "100912429fab4cb230e66ffb11e738ac5194e73a";
    const result = await gitUtils.determineBaseBranchHeadCommitOid(__dirname);
    // Non-PR events resolve to undefined without logging anything.
    t.deepEqual(result, undefined);
    t.deepEqual(0, coreInfoStub.callCount);
    coreInfoStub.restore();
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid not git repository", async (t) => {
    const coreInfoStub = sinon.stub(core, "info");
    process.env.GITHUB_EVENT_NAME = "pull_request";
    process.env.GITHUB_SHA = "100912429fab4cb230e66ffb11e738ac5194e73a";
    // A fresh temp dir is not a git repository, so the git call fails
    // and exactly one informational message is logged.
    await (0, util_1.withTmpDir)(async (tmpDir) => {
        await gitUtils.determineBaseBranchHeadCommitOid(tmpDir);
    });
    t.deepEqual(1, coreInfoStub.callCount);
    const expectedMessage = "git call failed. Will calculate the base branch SHA on the server. Error: " +
        "The checkout path provided to the action does not appear to be a git repository.";
    t.deepEqual(coreInfoStub.firstCall.args[0], expectedMessage);
    coreInfoStub.restore();
});
(0, ava_1.default)("determineBaseBranchHeadCommitOid other error", async (t) => {
    const coreInfoStub = sinon.stub(core, "info");
    process.env.GITHUB_EVENT_NAME = "pull_request";
    process.env.GITHUB_SHA = "100912429fab4cb230e66ffb11e738ac5194e73a";
    // A nonexistent checkout path should fail for a reason other than
    // "not a git repository".
    const result = await gitUtils.determineBaseBranchHeadCommitOid(path.join(__dirname, "../../i-dont-exist"));
    t.deepEqual(result, undefined);
    t.deepEqual(1, coreInfoStub.callCount);
    const loggedMessage = coreInfoStub.firstCall.args[0];
    t.assert(loggedMessage.startsWith("git call failed. Will calculate the base branch SHA on the server. Error: "));
    t.assert(!loggedMessage.endsWith("The checkout path provided to the action does not appear to be a git repository."));
    coreInfoStub.restore();
});
(0, ava_1.default)("decodeGitFilePath unquoted strings", async (t) => {
    // Unquoted paths from git are passed through verbatim: backslash
    // sequences are NOT interpreted as escapes.
    const passthroughCases = [
        "foo",
        "foo bar",
        "foo\\\\bar",
        'foo\\"bar',
        "foo\\001bar",
        "foo\\abar",
        "foo\\bbar",
        "foo\\fbar",
        "foo\\nbar",
        "foo\\rbar",
        "foo\\tbar",
        "foo\\vbar",
        "\\a\\b\\f\\n\\r\\t\\v",
    ];
    for (const filePath of passthroughCases) {
        t.deepEqual(gitUtils.decodeGitFilePath(filePath), filePath);
    }
});
(0, ava_1.default)("decodeGitFilePath quoted strings", async (t) => {
    // Quoted paths have the surrounding quotes stripped and C-style
    // escape sequences (octal and single-letter) decoded.
    const decodeCases = [
        ['"foo"', "foo"],
        ['"foo bar"', "foo bar"],
        ['"foo\\\\bar"', "foo\\bar"],
        ['"foo\\"bar"', 'foo"bar'],
        ['"foo\\001bar"', "foo\x01bar"],
        ['"foo\\abar"', "foo\x07bar"],
        ['"foo\\bbar"', "foo\bbar"],
        ['"foo\\fbar"', "foo\fbar"],
        ['"foo\\nbar"', "foo\nbar"],
        ['"foo\\rbar"', "foo\rbar"],
        ['"foo\\tbar"', "foo\tbar"],
        ['"foo\\vbar"', "foo\vbar"],
        ['"\\a\\b\\f\\n\\r\\t\\v"', "\x07\b\f\n\r\t\v"],
    ];
    for (const [encoded, decoded] of decodeCases) {
        t.deepEqual(gitUtils.decodeGitFilePath(encoded), decoded);
    }
});
//# sourceMappingURL=git-utils.test.js.map
1
lib/git-utils.test.js.map
Normal file
1
lib/git-utils.test.js.map
Normal file
File diff suppressed because one or more lines are too long
3
lib/status-report.js
generated
3
lib/status-report.js
generated
@@ -45,6 +45,7 @@ const actions_util_1 = require("./actions-util");
|
|||||||
const api_client_1 = require("./api-client");
|
const api_client_1 = require("./api-client");
|
||||||
const doc_url_1 = require("./doc-url");
|
const doc_url_1 = require("./doc-url");
|
||||||
const environment_1 = require("./environment");
|
const environment_1 = require("./environment");
|
||||||
|
const git_utils_1 = require("./git-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
var ActionName;
|
var ActionName;
|
||||||
(function (ActionName) {
|
(function (ActionName) {
|
||||||
@@ -125,7 +126,7 @@ function setJobStatusIfUnsuccessful(actionStatus) {
|
|||||||
async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception) {
|
async function createStatusReportBase(actionName, status, actionStartedAt, config, diskInfo, logger, cause, exception) {
|
||||||
try {
|
try {
|
||||||
const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
||||||
const ref = await (0, actions_util_1.getRef)();
|
const ref = await (0, git_utils_1.getRef)();
|
||||||
const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
|
const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
|
||||||
const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
|
const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
|
||||||
const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
|
const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
9
lib/trap-caching.js
generated
9
lib/trap-caching.js
generated
@@ -44,6 +44,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
|||||||
const apiClient = __importStar(require("./api-client"));
|
const apiClient = __importStar(require("./api-client"));
|
||||||
const doc_url_1 = require("./doc-url");
|
const doc_url_1 = require("./doc-url");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
// This constant should be bumped if we make a breaking change
|
// This constant should be bumped if we make a breaking change
|
||||||
// to how the CodeQL Action stores or retrieves the TRAP cache,
|
// to how the CodeQL Action stores or retrieves the TRAP cache,
|
||||||
@@ -80,7 +81,7 @@ async function downloadTrapCaches(codeql, languages, logger) {
|
|||||||
fs.mkdirSync(cacheDir, { recursive: true });
|
fs.mkdirSync(cacheDir, { recursive: true });
|
||||||
result[language] = cacheDir;
|
result[language] = cacheDir;
|
||||||
}
|
}
|
||||||
if (await actionsUtil.isAnalyzingDefaultBranch()) {
|
if (await gitUtils.isAnalyzingDefaultBranch()) {
|
||||||
logger.info("Analyzing default branch. Skipping downloading of TRAP caches.");
|
logger.info("Analyzing default branch. Skipping downloading of TRAP caches.");
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
@@ -122,7 +123,7 @@ async function downloadTrapCaches(codeql, languages, logger) {
|
|||||||
* @returns Whether the TRAP caches were uploaded.
|
* @returns Whether the TRAP caches were uploaded.
|
||||||
*/
|
*/
|
||||||
async function uploadTrapCaches(codeql, config, logger) {
|
async function uploadTrapCaches(codeql, config, logger) {
|
||||||
if (!(await actionsUtil.isAnalyzingDefaultBranch()))
|
if (!(await gitUtils.isAnalyzingDefaultBranch()))
|
||||||
return false; // Only upload caches from the default branch
|
return false; // Only upload caches from the default branch
|
||||||
for (const language of config.languages) {
|
for (const language of config.languages) {
|
||||||
const cacheDir = config.trapCaches[language];
|
const cacheDir = config.trapCaches[language];
|
||||||
@@ -151,14 +152,14 @@ async function cleanupTrapCaches(config, features, logger) {
|
|||||||
trap_cache_cleanup_skipped_because: "feature disabled",
|
trap_cache_cleanup_skipped_because: "feature disabled",
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
if (!(await actionsUtil.isAnalyzingDefaultBranch())) {
|
if (!(await gitUtils.isAnalyzingDefaultBranch())) {
|
||||||
return {
|
return {
|
||||||
trap_cache_cleanup_skipped_because: "not analyzing default branch",
|
trap_cache_cleanup_skipped_because: "not analyzing default branch",
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
let totalBytesCleanedUp = 0;
|
let totalBytesCleanedUp = 0;
|
||||||
const allCaches = await apiClient.listActionsCaches(CODEQL_TRAP_CACHE_PREFIX, await actionsUtil.getRef());
|
const allCaches = await apiClient.listActionsCaches(CODEQL_TRAP_CACHE_PREFIX, await gitUtils.getRef());
|
||||||
for (const language of config.languages) {
|
for (const language of config.languages) {
|
||||||
if (config.trapCaches[language]) {
|
if (config.trapCaches[language]) {
|
||||||
const cachesToRemove = await getTrapCachesForLanguage(allCaches, language, logger);
|
const cachesToRemove = await getTrapCachesForLanguage(allCaches, language, logger);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
13
lib/trap-caching.test.js
generated
13
lib/trap-caching.test.js
generated
@@ -45,6 +45,7 @@ const actionsUtil = __importStar(require("./actions-util"));
|
|||||||
const apiClient = __importStar(require("./api-client"));
|
const apiClient = __importStar(require("./api-client"));
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
@@ -111,7 +112,7 @@ function getTestConfigWithTempDir(tempDir) {
|
|||||||
(0, ava_1.default)("check flags for JS, analyzing default branch", async (t) => {
|
(0, ava_1.default)("check flags for JS, analyzing default branch", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfigWithTempDir(tmpDir);
|
const config = getTestConfigWithTempDir(tmpDir);
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const result = await (0, codeql_1.getTrapCachingExtractorConfigArgsForLang)(config, languages_1.Language.javascript);
|
const result = await (0, codeql_1.getTrapCachingExtractorConfigArgsForLang)(config, languages_1.Language.javascript);
|
||||||
t.deepEqual(result, [
|
t.deepEqual(result, [
|
||||||
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
|
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
|
||||||
@@ -123,7 +124,7 @@ function getTestConfigWithTempDir(tempDir) {
|
|||||||
(0, ava_1.default)("check flags for all, not analyzing default branch", async (t) => {
|
(0, ava_1.default)("check flags for all, not analyzing default branch", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
const config = getTestConfigWithTempDir(tmpDir);
|
const config = getTestConfigWithTempDir(tmpDir);
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
|
||||||
const result = await (0, codeql_1.getTrapCachingExtractorConfigArgs)(config);
|
const result = await (0, codeql_1.getTrapCachingExtractorConfigArgs)(config);
|
||||||
t.deepEqual(result, [
|
t.deepEqual(result, [
|
||||||
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
|
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
|
||||||
@@ -144,7 +145,7 @@ function getTestConfigWithTempDir(tempDir) {
|
|||||||
(0, ava_1.default)("upload cache key contains right fields", async (t) => {
|
(0, ava_1.default)("upload cache key contains right fields", async (t) => {
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
|
const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
sinon.stub(util, "tryGetFolderBytes").resolves(999_999_999);
|
sinon.stub(util, "tryGetFolderBytes").resolves(999_999_999);
|
||||||
const stubSave = sinon.stub(cache, "saveCache");
|
const stubSave = sinon.stub(cache, "saveCache");
|
||||||
process.env.GITHUB_SHA = "somesha";
|
process.env.GITHUB_SHA = "somesha";
|
||||||
@@ -159,7 +160,7 @@ function getTestConfigWithTempDir(tempDir) {
|
|||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
|
const logger = (0, testing_utils_1.getRecordingLogger)(loggedMessages);
|
||||||
sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir);
|
sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir);
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
|
||||||
const stubRestore = sinon.stub(cache, "restoreCache").resolves("found");
|
const stubRestore = sinon.stub(cache, "restoreCache").resolves("found");
|
||||||
const eventFile = path.resolve(tmpDir, "event.json");
|
const eventFile = path.resolve(tmpDir, "event.json");
|
||||||
process.env.GITHUB_EVENT_NAME = "pull_request";
|
process.env.GITHUB_EVENT_NAME = "pull_request";
|
||||||
@@ -185,8 +186,8 @@ function getTestConfigWithTempDir(tempDir) {
|
|||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
// This config specifies that we are analyzing JavaScript and Ruby, but not Swift.
|
// This config specifies that we are analyzing JavaScript and Ruby, but not Swift.
|
||||||
const config = getTestConfigWithTempDir(tmpDir);
|
const config = getTestConfigWithTempDir(tmpDir);
|
||||||
sinon.stub(actionsUtil, "getRef").resolves("refs/heads/main");
|
sinon.stub(gitUtils, "getRef").resolves("refs/heads/main");
|
||||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||||
const listStub = sinon.stub(apiClient, "listActionsCaches").resolves([
|
const listStub = sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||||
// Should be kept, since it's not relevant to CodeQL. In reality, the API shouldn't return
|
// Should be kept, since it's not relevant to CodeQL. In reality, the API shouldn't return
|
||||||
// this in the first place, but this is a defensive check.
|
// this in the first place, but this is a defensive check.
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/upload-lib.js
generated
3
lib/upload-lib.js
generated
@@ -60,6 +60,7 @@ const codeql_1 = require("./codeql");
|
|||||||
const config_utils_1 = require("./config-utils");
|
const config_utils_1 = require("./config-utils");
|
||||||
const environment_1 = require("./environment");
|
const environment_1 = require("./environment");
|
||||||
const fingerprints = __importStar(require("./fingerprints"));
|
const fingerprints = __importStar(require("./fingerprints"));
|
||||||
|
const gitUtils = __importStar(require("./git-utils"));
|
||||||
const init_1 = require("./init");
|
const init_1 = require("./init");
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const tools_features_1 = require("./tools-features");
|
const tools_features_1 = require("./tools-features");
|
||||||
@@ -423,7 +424,7 @@ async function uploadFiles(sarifPath, checkoutPath, category, features, logger)
|
|||||||
logger.debug(`Compressing serialized SARIF`);
|
logger.debug(`Compressing serialized SARIF`);
|
||||||
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
|
const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
|
||||||
const checkoutURI = (0, file_url_1.default)(checkoutPath);
|
const checkoutURI = (0, file_url_1.default)(checkoutPath);
|
||||||
const payload = buildPayload(await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), analysisKey, util.getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, environment, toolNames, await actionsUtil.determineBaseBranchHeadCommitOid());
|
const payload = buildPayload(await gitUtils.getCommitOid(checkoutPath), await gitUtils.getRef(), analysisKey, util.getRequiredEnvParam("GITHUB_WORKFLOW"), zippedSarif, actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutURI, environment, toolNames, await gitUtils.determineBaseBranchHeadCommitOid());
|
||||||
// Log some useful debug info about the info
|
// Log some useful debug info about the info
|
||||||
const rawUploadSizeBytes = sarifPayload.length;
|
const rawUploadSizeBytes = sarifPayload.length;
|
||||||
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
|
logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user