Mirror of https://github.com/github/codeql-action.git (synced 2025-12-16 20:39:23 +08:00)

Compare commits: v2.21.0...codeql-bun (17 commits)
| SHA1 |
|---|
| 7b6664fa89 |
| fda93d8ef5 |
| 18ae9813bf |
| ce84bed594 |
| d0dd7d77b4 |
| 466dbc913e |
| bac7c32ff7 |
| 5658fd1df2 |
| 2637069a45 |
| 0bc4788cf7 |
| 004f976bef |
| d577d6f6b1 |
| 9922e17dbb |
| bf162c4114 |
| 79ad6142b1 |
| 6484fb09f6 |
| c1144b5fa0 |
@@ -22,6 +22,7 @@
 }],
 "import/no-amd": "error",
 "import/no-commonjs": "error",
+"import/no-cycle": "error",
 "import/no-dynamic-require": "error",
 // Disable the rule that checks that devDependencies aren't imported since we use a single
 // linting configuration file for both source and test code.
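The added `import/no-cycle` rule makes eslint-plugin-import report circular dependencies between modules. A minimal sketch of the kind of cycle it flags (illustrative file names, not from this repository; two files shown in one snippet):

```js
// a.js
import { b } from "./b.js";
export const a = () => b();

// b.js
import { a } from "./a.js"; // cycle: a.js -> b.js -> a.js, reported by import/no-cycle
export const b = () => a();
```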
2 .github/workflows/post-release-mergeback.yml (vendored)
@@ -130,7 +130,7 @@ jobs:
 npm version patch --no-git-tag-version

 # Update the changelog
-perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==3)' CHANGELOG.md
+perl -i -pe 's/^/## \[UNRELEASED\]\n\nNo user facing changes.\n\n/ if($.==5)' CHANGELOG.md
 git add .
 git commit -m "Update changelog and version after ${VERSION}"

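The only change in this workflow is the target line number: the perl one-liner (`-i -pe` rewrites the file in place, `$.` is the current line number) now prepends the `[UNRELEASED]` stanza to line 5 of CHANGELOG.md instead of line 3. A minimal Node.js sketch of the same edit, for illustration only; the workflow itself keeps using perl:

```js
const fs = require("fs");

// Prepend the "[UNRELEASED]" stanza to the start of line 5 of CHANGELOG.md,
// mirroring: perl -i -pe 's/^/.../ if($.==5)' CHANGELOG.md
const stub = "## [UNRELEASED]\n\nNo user facing changes.\n\n";
const lines = fs.readFileSync("CHANGELOG.md", "utf8").split("\n");
lines[4] = stub + lines[4]; // line 5 is index 4
fs.writeFileSync("CHANGELOG.md", lines.join("\n"));
```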
CHANGELOG.md

@@ -2,6 +2,10 @@

 See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

+## [UNRELEASED]
+
+No user facing changes.
+
 ## 2.21.0 - 19 Jul 2023

 - CodeQL Action now requires CodeQL CLI 2.9.4 or later. For more information, see the corresponding changelog entry for CodeQL Action version 2.20.4. [#1724](https://github.com/github/codeql-action/pull/1724)
234 lib/actions-util.js (generated)
@@ -23,17 +23,13 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.getWorkflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionVersion = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.getWorkflowRunAttempt = exports.getWorkflowRunID = exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.getWorkflowEventName = exports.getActionVersion = exports.getActionsStatus = exports.getRef = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
 const fs = __importStar(require("fs"));
-const os = __importStar(require("os"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
 const safeWhich = __importStar(require("@chrisgavin/safe-which"));
-const api = __importStar(require("./api-client"));
-const environment_1 = require("./environment");
 const util_1 = require("./util");
-const workflow_1 = require("./workflow");
 // eslint-disable-next-line import/no-commonjs
 const pkg = require("../package.json");
 /**
@@ -150,51 +146,6 @@ const determineMergeBaseCommitOid = async function () {
 }
 };
 exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
-/**
- * Get the analysis key parameter for the current job.
- *
- * This will combine the workflow path and current job name.
- * Computing this the first time requires making requests to
- * the GitHub API, but after that the result will be cached.
- */
-async function getAnalysisKey() {
-const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
-let analysisKey = process.env[analysisKeyEnvVar];
-if (analysisKey !== undefined) {
-return analysisKey;
-}
-const workflowPath = await (0, workflow_1.getWorkflowRelativePath)();
-const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
-analysisKey = `${workflowPath}:${jobName}`;
-core.exportVariable(analysisKeyEnvVar, analysisKey);
-return analysisKey;
-}
-exports.getAnalysisKey = getAnalysisKey;
-async function getAutomationID() {
-const analysis_key = await getAnalysisKey();
-const environment = (0, exports.getRequiredInput)("matrix");
-return computeAutomationID(analysis_key, environment);
-}
-exports.getAutomationID = getAutomationID;
-function computeAutomationID(analysis_key, environment) {
-let automationID = `${analysis_key}/`;
-const matrix = (0, util_1.parseMatrixInput)(environment);
-if (matrix !== undefined) {
-// the id has to be deterministic so we sort the fields
-for (const entry of Object.entries(matrix).sort()) {
-if (typeof entry[1] === "string") {
-automationID += `${entry[0]}:${entry[1]}/`;
-}
-else {
-// In code scanning we just handle the string values,
-// the rest get converted to the empty string
-automationID += `${entry[0]}:/`;
-}
-}
-}
-return automationID;
-}
-exports.computeAutomationID = computeAutomationID;
 /**
  * Get the ref currently being analyzed.
  */
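The analysis-key and automation-ID helpers removed above are not deleted outright: the updated exports in lib/api-client.js at the end of this diff, and the test changes in lib/actions-util.test.js below, show getAnalysisKey, getAutomationID and computeAutomationID re-exported from ./api-client. A minimal usage sketch of the relocated computeAutomationID, with the expected results taken from the updated tests below:

```js
// Assumes the post-move layout, where computeAutomationID lives in ./api-client.
const { computeAutomationID } = require("./api-client");

// Matrix entries are sorted so the ID is deterministic; non-string values
// collapse to an empty segment.
computeAutomationID(
  ".github/workflows/codeql-analysis.yml:analyze",
  '{"os": "linux", "language": "javascript"}'
);
// => ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/"

computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
// => ".github/workflows/codeql-analysis.yml:analyze/"
```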
@@ -278,159 +229,6 @@ function getActionVersion() {
 return pkg.version;
 }
 exports.getActionVersion = getActionVersion;
-/**
- * Compose a StatusReport.
- *
- * @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
- * @param status The status. Must be 'success', 'failure', or 'starting'
- * @param startedAt The time this action started executing.
- * @param cause Cause of failure (only supply if status is 'failure')
- * @param exception Exception (only supply if status is 'failure')
- */
-async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
-const commitOid = (0, exports.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
-const ref = await getRef();
-const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
-const workflowRunID = (0, workflow_1.getWorkflowRunID)();
-const workflowRunAttempt = (0, workflow_1.getWorkflowRunAttempt)();
-const workflowName = process.env["GITHUB_WORKFLOW"] || "";
-const jobName = process.env["GITHUB_JOB"] || "";
-const analysis_key = await getAnalysisKey();
-let workflowStartedAt = process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT];
-if (workflowStartedAt === undefined) {
-workflowStartedAt = actionStartedAt.toISOString();
-core.exportVariable(environment_1.EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
-}
-const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
-const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
-const actionRef = process.env["GITHUB_ACTION_REF"];
-const testingEnvironment = process.env[environment_1.EnvVar.TESTING_ENVIRONMENT] || "";
-// re-export the testing environment variable so that it is available to subsequent steps,
-// even if it was only set for this step
-if (testingEnvironment !== "") {
-core.exportVariable(environment_1.EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
-}
-const statusReport = {
-job_run_uuid: jobRunUUID,
-workflow_run_id: workflowRunID,
-workflow_run_attempt: workflowRunAttempt,
-workflow_name: workflowName,
-job_name: jobName,
-analysis_key,
-commit_oid: commitOid,
-ref,
-action_name: actionName,
-action_ref: actionRef,
-action_oid: "unknown",
-started_at: workflowStartedAt,
-action_started_at: actionStartedAt.toISOString(),
-status,
-testing_environment: testingEnvironment,
-runner_os: runnerOs,
-action_version: getActionVersion(),
-};
-// Add optional parameters
-if (cause) {
-statusReport.cause = cause;
-}
-if (exception) {
-statusReport.exception = exception;
-}
-if (status === "success" ||
-status === "failure" ||
-status === "aborted" ||
-status === "user-error") {
-statusReport.completed_at = new Date().toISOString();
-}
-const matrix = (0, exports.getRequiredInput)("matrix");
-if (matrix) {
-statusReport.matrix_vars = matrix;
-}
-if ("RUNNER_ARCH" in process.env) {
-// RUNNER_ARCH is available only in GHES 3.4 and later
-// Values other than X86, X64, ARM, or ARM64 are discarded server side
-statusReport.runner_arch = process.env["RUNNER_ARCH"];
-}
-if (runnerOs === "Windows" || runnerOs === "macOS") {
-statusReport.runner_os_release = os.release();
-}
-if (codeQlCliVersion !== undefined) {
-statusReport.codeql_version = codeQlCliVersion;
-}
-return statusReport;
-}
-exports.createStatusReportBase = createStatusReportBase;
-const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
-const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
-const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
-const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
-/**
- * Send a status report to the code_scanning/analysis/status endpoint.
- *
- * Optionally checks the response from the API endpoint and sets the action
- * as failed if the status report failed. This is only expected to be used
- * when sending a 'starting' report.
- *
- * Returns whether sending the status report was successful of not.
- */
-async function sendStatusReport(statusReport) {
-const statusReportJSON = JSON.stringify(statusReport);
-core.debug(`Sending status report: ${statusReportJSON}`);
-// If in test mode we don't want to upload the results
-if ((0, util_1.isInTestMode)()) {
-core.debug("In test mode. Status reports are not uploaded.");
-return true;
-}
-const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
-const [owner, repo] = nwo.split("/");
-const client = api.getApiClient();
-try {
-await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
-owner,
-repo,
-data: statusReportJSON,
-});
-return true;
-}
-catch (e) {
-console.log(e);
-if ((0, util_1.isHTTPError)(e)) {
-switch (e.status) {
-case 403:
-if (getWorkflowEventName() === "push" &&
-process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
-core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
-"Uploading Code Scanning results requires write access. " +
-'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
-"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
-}
-else {
-core.setFailed(e.message || GENERIC_403_MSG);
-}
-return false;
-case 404:
-core.setFailed(GENERIC_404_MSG);
-return false;
-case 422:
-// schema incompatibility when reporting status
-// this means that this action version is no longer compatible with the API
-// we still want to continue as it is likely the analysis endpoint will work
-if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
-core.debug(INCOMPATIBLE_MSG);
-}
-else {
-core.debug(OUT_OF_DATE_MSG);
-}
-return true;
-}
-}
-// something else has gone wrong and the request/response will be logged by octokit
-// it's possible this is a transient error and we should continue scanning
-core.error("An unexpected error occurred when sending code scanning status report.");
-return true;
-}
-}
-exports.sendStatusReport = sendStatusReport;
 /**
  * Returns the name of the event that triggered this workflow.
  *
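createStatusReportBase and sendStatusReport also move out of this module; the lib/analyze-action.js hunk later in this diff calls them through the api-client import instead. A minimal sketch of the new call shape, assuming the relocated exports shown in lib/api-client.js below:

```js
const api = require("./api-client");

// Same "starting" report the analyze action previously built via actionsUtil.*
async function reportStarting(startedAt) {
  const base = await api.createStatusReportBase("finish", "starting", startedAt);
  return api.sendStatusReport(base); // resolves to true/false
}
```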
@@ -545,4 +343,34 @@ function getUploadValue(input) {
 }
 }
 exports.getUploadValue = getUploadValue;
+/**
+ * Get the workflow run ID.
+ */
+function getWorkflowRunID() {
+const workflowRunIdString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID");
+const workflowRunID = parseInt(workflowRunIdString, 10);
+if (Number.isNaN(workflowRunID)) {
+throw new Error(`GITHUB_RUN_ID must define a non NaN workflow run ID. Current value is ${workflowRunIdString}`);
+}
+if (workflowRunID < 0) {
+throw new Error(`GITHUB_RUN_ID must be a non-negative integer. Current value is ${workflowRunIdString}`);
+}
+return workflowRunID;
+}
+exports.getWorkflowRunID = getWorkflowRunID;
+/**
+ * Get the workflow run attempt number.
+ */
+function getWorkflowRunAttempt() {
+const workflowRunAttemptString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ATTEMPT");
+const workflowRunAttempt = parseInt(workflowRunAttemptString, 10);
+if (Number.isNaN(workflowRunAttempt)) {
+throw new Error(`GITHUB_RUN_ATTEMPT must define a non NaN workflow run attempt. Current value is ${workflowRunAttemptString}`);
+}
+if (workflowRunAttempt <= 0) {
+throw new Error(`GITHUB_RUN_ATTEMPT must be a positive integer. Current value is ${workflowRunAttemptString}`);
+}
+return workflowRunAttempt;
+}
+exports.getWorkflowRunAttempt = getWorkflowRunAttempt;
 //# sourceMappingURL=actions-util.js.map
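A minimal usage sketch for the two new helpers, assuming the standard GitHub Actions environment variables are set (the updated test in lib/actions-util.test.js below asserts the same 100 / 2 values on the resulting status report):

```js
const { getWorkflowRunID, getWorkflowRunAttempt } = require("./actions-util");

process.env.GITHUB_RUN_ID = "100";     // must parse to a non-negative integer
process.env.GITHUB_RUN_ATTEMPT = "2";  // must parse to a positive integer

console.log(getWorkflowRunID());      // 100
console.log(getWorkflowRunAttempt()); // 2
```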
File diff suppressed because one or more lines are too long
63 lib/actions-util.test.js (generated)
@@ -30,14 +30,15 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
-const actionsutil = __importStar(require("./actions-util"));
+const actionsUtil = __importStar(require("./actions-util"));
+const api_client_1 = require("./api-client");
 const environment_1 = require("./environment");
 const testing_utils_1 = require("./testing-utils");
 const util_1 = require("./util");
 (0, testing_utils_1.setupTests)(ava_1.default);
 (0, ava_1.default)("getRef() throws on the empty string", async (t) => {
 process.env["GITHUB_REF"] = "";
-await t.throwsAsync(actionsutil.getRef);
+await t.throwsAsync(actionsUtil.getRef);
 });
 (0, ava_1.default)("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
 await (0, util_1.withTmpDir)(async (tmpDir) => {
@@ -46,9 +47,9 @@ const util_1 = require("./util");
 const currentSha = "a".repeat(40);
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = currentSha;
-const callback = sinon.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsUtil, "getCommitOid");
 callback.withArgs("HEAD").resolves(currentSha);
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 callback.restore();
 });
@@ -60,10 +61,10 @@ const util_1 = require("./util");
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = "b".repeat(40);
 const sha = "a".repeat(40);
-const callback = sinon.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsUtil, "getCommitOid");
 callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
 callback.withArgs("HEAD").resolves(sha);
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 callback.restore();
 });
@@ -73,10 +74,10 @@ const util_1 = require("./util");
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 process.env["GITHUB_REF"] = "refs/pull/1/merge";
 process.env["GITHUB_SHA"] = "a".repeat(40);
-const callback = sinon.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsUtil, "getCommitOid");
 callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
 callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, "refs/pull/1/head");
 callback.restore();
 });
@@ -84,16 +85,16 @@ const util_1 = require("./util");
 (0, ava_1.default)("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
 getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
 // These values are be ignored
 process.env["GITHUB_REF"] = "refs/pull/1/merge";
 process.env["GITHUB_SHA"] = "a".repeat(40);
-const callback = sinon.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsUtil, "getCommitOid");
 callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
 callback.withArgs("HEAD").resolves("b".repeat(40));
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, "refs/pull/2/merge");
 callback.restore();
 getAdditionalInputStub.restore();
@@ -107,7 +108,7 @@ const util_1 = require("./util");
 process.env["CODE_SCANNING_REF"] = expectedRef;
 process.env["GITHUB_REF"] = "";
 process.env["GITHUB_SHA"] = currentSha;
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 });
 });
@@ -119,17 +120,17 @@ const util_1 = require("./util");
 process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = currentSha;
-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 });
 });
 (0, ava_1.default)("getRef() throws an error if only `ref` is provided as an input", async (t) => {
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
 await t.throwsAsync(async () => {
-await actionsutil.getRef();
+await actionsUtil.getRef();
 }, {
 instanceOf: Error,
 message: "Both 'ref' and 'sha' are required if one of them is provided.",
@@ -141,10 +142,10 @@ const util_1 = require("./util");
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 process.env["GITHUB_WORKSPACE"] = "/tmp";
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
 await t.throwsAsync(async () => {
-await actionsutil.getRef();
+await actionsUtil.getRef();
 }, {
 instanceOf: Error,
 message: "Both 'ref' and 'sha' are required if one of them is provided.",
@@ -153,19 +154,19 @@ const util_1 = require("./util");
 });
 });
 (0, ava_1.default)("computeAutomationID()", async (t) => {
-let actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
+let actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"language": "javascript", "os": "linux"}');
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
 // check the environment sorting
-actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"os": "linux", "language": "javascript"}');
+actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"os": "linux", "language": "javascript"}');
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/");
 // check that an empty environment produces the right results
-actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", "{}");
+actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", "{}");
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
 // check non string environment values
-actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", '{"number": 1, "object": {"language": "javascript"}}');
+actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", '{"number": 1, "object": {"language": "javascript"}}');
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/number:/object:/");
 // check undefined environment
-actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
+actualAutomationID = (0, api_client_1.computeAutomationID)(".github/workflows/codeql-analysis.yml:analyze", undefined);
 t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
 });
 (0, ava_1.default)("initializeEnvironment", (t) => {
@@ -175,7 +176,7 @@ const util_1 = require("./util");
 (0, ava_1.default)("isAnalyzingDefaultBranch()", async (t) => {
 process.env["GITHUB_EVENT_NAME"] = "push";
 process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
 process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";
 await (0, util_1.withTmpDir)(async (tmpDir) => {
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -188,18 +189,18 @@ const util_1 = require("./util");
 process.env["GITHUB_EVENT_PATH"] = envFile;
 process.env["GITHUB_REF"] = "main";
 process.env["GITHUB_SHA"] = "1234";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
 process.env["GITHUB_REF"] = "feature";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
 fs.writeFileSync(envFile, JSON.stringify({
 schedule: "0 0 * * *",
 }));
 process.env["GITHUB_EVENT_NAME"] = "schedule";
 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub
 .withArgs("ref")
 .resolves("refs/heads/something-else");
@@ -208,7 +209,7 @@ const util_1 = require("./util");
 .resolves("0000000000000000000000000000000000000000");
 process.env["GITHUB_EVENT_NAME"] = "schedule";
 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
 getAdditionalInputStub.restore();
 });
 });
@@ -222,9 +223,9 @@ const util_1 = require("./util");
 process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld";
 process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
 process.env["RUNNER_OS"] = "macOS";
-const getRequiredInput = sinon.stub(actionsutil, "getRequiredInput");
+const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
 getRequiredInput.withArgs("matrix").resolves("input/matrix");
-const statusReport = await actionsutil.createStatusReportBase("init", "failure", new Date("May 19, 2023 05:19:00"), "failure cause", "exception stack trace");
+const statusReport = await (0, api_client_1.createStatusReportBase)("init", "failure", new Date("May 19, 2023 05:19:00"), "failure cause", "exception stack trace");
 t.assert(typeof statusReport.job_run_uuid === "string");
 t.assert(statusReport.workflow_run_id === 100);
 t.assert(statusReport.workflow_run_attempt === 2);
File diff suppressed because one or more lines are too long
7 lib/analyze-action-env.test.js (generated)
@@ -30,6 +30,7 @@ const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze = __importStar(require("./analyze"));
+const api = __importStar(require("./api-client"));
 const configUtils = __importStar(require("./config-utils"));
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
@@ -46,9 +47,9 @@ const util = __importStar(require("./util"));
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
 process.env["GITHUB_API_URL"] = "https://api.github.com";
 sinon
-.stub(actionsUtil, "createStatusReportBase")
+.stub(api, "createStatusReportBase")
 .resolves({});
-sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
+sinon.stub(api, "sendStatusReport").resolves(true);
 sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 const gitHubVersion = {
 type: util.GitHubVariant.DOTCOM,
@@ -65,7 +66,7 @@ const util = __importStar(require("./util"));
 const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 optionalInputStub.withArgs("cleanup-level").returns("none");
 optionalInputStub.withArgs("expect-error").returns("false");
-sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
+sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
 // When there are no action inputs for RAM and threads, the action uses
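Both hunks in this test file follow the same pattern: helpers that moved to api-client (createStatusReportBase, sendStatusReport, getGitHubVersion) are now stubbed on the api import, while the actions-util stubs stay put. A condensed sketch of the resulting setup, for orientation only:

```js
const sinon = require("sinon");
const actionsUtil = require("./actions-util");
const api = require("./api-client");
const util = require("./util");

// Relocated helpers are stubbed where they now live (./api-client) ...
sinon.stub(api, "createStatusReportBase").resolves({});
sinon.stub(api, "sendStatusReport").resolves(true);
sinon.stub(api, "getGitHubVersion").resolves({ type: util.GitHubVariant.DOTCOM });
// ... while helpers that stayed in ./actions-util are stubbed as before.
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
```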
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,GAAG,EAAE,wBAAwB,CAAC;aACnC,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
7 lib/analyze-action-input.test.js (generated)
@@ -30,6 +30,7 @@ const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze = __importStar(require("./analyze"));
+const api = __importStar(require("./api-client"));
 const configUtils = __importStar(require("./config-utils"));
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
@@ -46,9 +47,9 @@ const util = __importStar(require("./util"));
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
 process.env["GITHUB_API_URL"] = "https://api.github.com";
 sinon
-.stub(actionsUtil, "createStatusReportBase")
+.stub(api, "createStatusReportBase")
 .resolves({});
-sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
+sinon.stub(api, "sendStatusReport").resolves(true);
 const gitHubVersion = {
 type: util.GitHubVariant.DOTCOM,
 };
@@ -64,7 +65,7 @@ const util = __importStar(require("./util"));
 const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 optionalInputStub.withArgs("cleanup-level").returns("none");
 optionalInputStub.withArgs("expect-error").returns("false");
-sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
+sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
 sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 (0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
 (0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,WAAW,EAAE,wBAAwB,CAAC;aAC3C,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC7D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,GAAG,EAAE,wBAAwB,CAAC;aACnC,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
13 lib/analyze-action.js (generated)
@@ -34,6 +34,7 @@ const core = __importStar(require("@actions/core"));
 const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
 const api_client_1 = require("./api-client");
+const api = __importStar(require("./api-client"));
 const autobuild_1 = require("./autobuild");
 const codeql_1 = require("./codeql");
 const config_utils_1 = require("./config-utils");
@@ -49,12 +50,12 @@ const util = __importStar(require("./util"));
 const util_1 = require("./util");
 async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
 const status = actionsUtil.getActionsStatus(error, stats?.analyze_failure_language);
-const statusReportBase = await actionsUtil.createStatusReportBase("finish", status, startedAt, error?.message, error?.stack);
+const statusReportBase = await api.createStatusReportBase("finish", status, startedAt, error?.message, error?.stack);
 const statusReport = {
 ...statusReportBase,
 ...(config
 ? {
-ml_powered_javascript_queries: util.getMlPoweredJsQueriesStatus(config),
+ml_powered_javascript_queries: (0, config_utils_1.getMlPoweredJsQueriesStatus)(config),
 }
 : {}),
 ...(stats || {}),
@@ -66,10 +67,10 @@ async function sendStatusReport(startedAt, config, stats, error, trapCacheUpload
 trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
 trap_cache_upload_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
 };
-await actionsUtil.sendStatusReport(trapCacheUploadStatusReport);
+await api.sendStatusReport(trapCacheUploadStatusReport);
 }
 else {
-await actionsUtil.sendStatusReport(statusReport);
+await api.sendStatusReport(statusReport);
 }
 }
 exports.sendStatusReport = sendStatusReport;
@@ -145,7 +146,7 @@ async function run() {
 util.initializeEnvironment(actionsUtil.getActionVersion());
 const logger = (0, logging_1.getActionsLogger)();
 try {
-if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("finish", "starting", startedAt)))) {
+if (!(await api.sendStatusReport(await api.createStatusReportBase("finish", "starting", startedAt)))) {
 return;
 }
 config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
@@ -161,7 +162,7 @@ async function run() {
 const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
 const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
 const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
-const memory = await util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], features);
+const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], await features.getValue(feature_flags_1.Feature.ScalingReservedRamEnabled));
 await runAutobuildIfLegacyGoWorkflow(config, logger);
 dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, features);
 if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
File diff suppressed because one or more lines are too long
4 lib/analyze.js generated
@@ -133,7 +133,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const statusReport = {};
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
const queryFlags = [memoryFlag, threadsFlag];
- await util.logCodeScanningConfigInCli(codeql, features, logger);
+ await (0, feature_flags_1.logCodeScanningConfigInCli)(codeql, features, logger);
for (const language of config.languages) {
const queries = config.queries[language];
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
@@ -142,7 +142,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
let startTimeInterpretResults;
let endTimeInterpretResults;
- if (await util.useCodeScanningConfigInCli(codeql, features)) {
+ if (await (0, feature_flags_1.useCodeScanningConfigInCli)(codeql, features)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
// is done in the CLI. We just need to make a single
File diff suppressed because one or more lines are too long
249 lib/api-client.js generated
@@ -26,13 +26,16 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
- exports.getGitHubVersion = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
+ exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRelativePath = exports.sendStatusReport = exports.createStatusReportBase = exports.getGitHubVersion = exports.getGitHubVersionFromApi = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
+ const os = __importStar(require("os"));
+ const core = __importStar(require("@actions/core"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
- const util = __importStar(require("./util"));
+ const environment_1 = require("./environment");
const util_1 = require("./util");
+ const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
var DisallowedAPIVersionReason;
(function (DisallowedAPIVersionReason) {
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
@@ -64,6 +67,29 @@ function getApiClientWithExternalAuth(apiDetails) {
}
exports.getApiClientWithExternalAuth = getApiClientWithExternalAuth;
let cachedGitHubVersion = undefined;
+ async function getGitHubVersionFromApi(apiClient, apiDetails) {
+ // We can avoid making an API request in the standard dotcom case
+ if ((0, util_1.parseGitHubUrl)(apiDetails.url) === util_1.GITHUB_DOTCOM_URL) {
+ return { type: util_1.GitHubVariant.DOTCOM };
+ }
+ // Doesn't strictly have to be the meta endpoint as we're only
+ // using the response headers which are available on every request.
+ const response = await apiClient.rest.meta.get();
+ // This happens on dotcom, although we expect to have already returned in that
+ // case. This can also serve as a fallback in cases we haven't foreseen.
+ if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
+ return { type: util_1.GitHubVariant.DOTCOM };
+ }
+ if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
+ return { type: util_1.GitHubVariant.GHAE };
+ }
+ if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
+ return { type: util_1.GitHubVariant.GHE_DOTCOM };
+ }
+ const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
+ return { type: util_1.GitHubVariant.GHES, version };
+ }
+ exports.getGitHubVersionFromApi = getGitHubVersionFromApi;
/**
* Report the GitHub server version. This is a wrapper around
* util.getGitHubVersion() that automatically supplies GitHub API details using
@@ -73,9 +99,226 @@ let cachedGitHubVersion = undefined;
*/
async function getGitHubVersion() {
if (cachedGitHubVersion === undefined) {
- cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
+ cachedGitHubVersion = await getGitHubVersionFromApi(getApiClient(), getApiDetails());
}
return cachedGitHubVersion;
}
exports.getGitHubVersion = getGitHubVersion;
+ /**
+ * Compose a StatusReport.
+ *
+ * @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
+ * @param status The status. Must be 'success', 'failure', or 'starting'
+ * @param startedAt The time this action started executing.
+ * @param cause Cause of failure (only supply if status is 'failure')
+ * @param exception Exception (only supply if status is 'failure')
+ */
+ async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
+ const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
+ const ref = await (0, actions_util_1.getRef)();
+ const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
+ const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
+ const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
+ const workflowName = process.env["GITHUB_WORKFLOW"] || "";
+ const jobName = process.env["GITHUB_JOB"] || "";
+ const analysis_key = await getAnalysisKey();
+ let workflowStartedAt = process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT];
+ if (workflowStartedAt === undefined) {
+ workflowStartedAt = actionStartedAt.toISOString();
+ core.exportVariable(environment_1.EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
+ }
+ const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
+ const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
+ const actionRef = process.env["GITHUB_ACTION_REF"];
+ const testingEnvironment = process.env[environment_1.EnvVar.TESTING_ENVIRONMENT] || "";
+ // re-export the testing environment variable so that it is available to subsequent steps,
+ // even if it was only set for this step
+ if (testingEnvironment !== "") {
+ core.exportVariable(environment_1.EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
+ }
+ const statusReport = {
+ job_run_uuid: jobRunUUID,
+ workflow_run_id: workflowRunID,
+ workflow_run_attempt: workflowRunAttempt,
+ workflow_name: workflowName,
+ job_name: jobName,
+ analysis_key,
+ commit_oid: commitOid,
+ ref,
+ action_name: actionName,
+ action_ref: actionRef,
+ action_oid: "unknown",
+ started_at: workflowStartedAt,
+ action_started_at: actionStartedAt.toISOString(),
+ status,
+ testing_environment: testingEnvironment,
+ runner_os: runnerOs,
+ action_version: (0, actions_util_1.getActionVersion)(),
+ };
+ // Add optional parameters
+ if (cause) {
+ statusReport.cause = cause;
+ }
+ if (exception) {
+ statusReport.exception = exception;
+ }
+ if (status === "success" ||
+ status === "failure" ||
+ status === "aborted" ||
+ status === "user-error") {
+ statusReport.completed_at = new Date().toISOString();
+ }
+ const matrix = (0, actions_util_1.getRequiredInput)("matrix");
+ if (matrix) {
+ statusReport.matrix_vars = matrix;
+ }
+ if ("RUNNER_ARCH" in process.env) {
+ // RUNNER_ARCH is available only in GHES 3.4 and later
+ // Values other than X86, X64, ARM, or ARM64 are discarded server side
+ statusReport.runner_arch = process.env["RUNNER_ARCH"];
+ }
+ if (runnerOs === "Windows" || runnerOs === "macOS") {
+ statusReport.runner_os_release = os.release();
+ }
+ if (codeQlCliVersion !== undefined) {
+ statusReport.codeql_version = codeQlCliVersion;
+ }
+ return statusReport;
+ }
+ exports.createStatusReportBase = createStatusReportBase;
+ const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
+ const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
+ const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
+ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
+ /**
+ * Send a status report to the code_scanning/analysis/status endpoint.
+ *
+ * Optionally checks the response from the API endpoint and sets the action
+ * as failed if the status report failed. This is only expected to be used
+ * when sending a 'starting' report.
+ *
+ * Returns whether sending the status report was successful of not.
+ */
+ async function sendStatusReport(statusReport) {
+ const statusReportJSON = JSON.stringify(statusReport);
+ core.debug(`Sending status report: ${statusReportJSON}`);
+ // If in test mode we don't want to upload the results
+ if ((0, util_1.isInTestMode)()) {
+ core.debug("In test mode. Status reports are not uploaded.");
+ return true;
+ }
+ const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
+ const [owner, repo] = nwo.split("/");
+ const client = getApiClient();
+ try {
+ await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
+ owner,
+ repo,
+ data: statusReportJSON,
+ });
+ return true;
+ }
+ catch (e) {
+ console.log(e);
+ if ((0, util_1.isHTTPError)(e)) {
+ switch (e.status) {
+ case 403:
+ if ((0, actions_util_1.getWorkflowEventName)() === "push" &&
+ process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
+ core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
+ "Uploading Code Scanning results requires write access. " +
+ 'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
+ "See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
+ }
+ else {
+ core.setFailed(e.message || GENERIC_403_MSG);
+ }
+ return false;
+ case 404:
+ core.setFailed(GENERIC_404_MSG);
+ return false;
+ case 422:
+ // schema incompatibility when reporting status
+ // this means that this action version is no longer compatible with the API
+ // we still want to continue as it is likely the analysis endpoint will work
+ if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
+ core.debug(INCOMPATIBLE_MSG);
+ }
+ else {
+ core.debug(OUT_OF_DATE_MSG);
+ }
+ return true;
+ }
+ }
+ // something else has gone wrong and the request/response will be logged by octokit
+ // it's possible this is a transient error and we should continue scanning
+ core.error("An unexpected error occurred when sending code scanning status report.");
+ return true;
+ }
+ }
+ exports.sendStatusReport = sendStatusReport;
+ /**
+ * Get the path of the currently executing workflow relative to the repository root.
+ */
+ async function getWorkflowRelativePath() {
+ const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
+ const owner = repo_nwo[0];
+ const repo = repo_nwo[1];
+ const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
+ const apiClient = getApiClient();
+ const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
+ owner,
+ repo,
+ run_id,
+ });
+ const workflowUrl = runsResponse.data.workflow_url;
+ const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
+ return workflowResponse.data.path;
+ }
+ exports.getWorkflowRelativePath = getWorkflowRelativePath;
+ /**
+ * Get the analysis key parameter for the current job.
+ *
+ * This will combine the workflow path and current job name.
+ * Computing this the first time requires making requests to
+ * the GitHub API, but after that the result will be cached.
+ */
+ async function getAnalysisKey() {
+ const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
+ let analysisKey = process.env[analysisKeyEnvVar];
+ if (analysisKey !== undefined) {
+ return analysisKey;
+ }
+ const workflowPath = await getWorkflowRelativePath();
+ const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
+ analysisKey = `${workflowPath}:${jobName}`;
+ core.exportVariable(analysisKeyEnvVar, analysisKey);
+ return analysisKey;
+ }
+ exports.getAnalysisKey = getAnalysisKey;
+ async function getAutomationID() {
+ const analysis_key = await getAnalysisKey();
+ const environment = (0, actions_util_1.getRequiredInput)("matrix");
+ return computeAutomationID(analysis_key, environment);
+ }
+ exports.getAutomationID = getAutomationID;
+ function computeAutomationID(analysis_key, environment) {
+ let automationID = `${analysis_key}/`;
+ const matrix = (0, util_1.parseMatrixInput)(environment);
+ if (matrix !== undefined) {
+ // the id has to be deterministic so we sort the fields
+ for (const entry of Object.entries(matrix).sort()) {
+ if (typeof entry[1] === "string") {
+ automationID += `${entry[0]}:${entry[1]}/`;
+ }
+ else {
+ // In code scanning we just handle the string values,
+ // the rest get converted to the empty string
+ automationID += `${entry[0]}:/`;
+ }
+ }
+ }
+ return automationID;
+ }
+ exports.computeAutomationID = computeAutomationID;
//# sourceMappingURL=api-client.js.map
File diff suppressed because one or more lines are too long
74 lib/api-client.test.js generated
@@ -26,23 +26,22 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
+ const github = __importStar(require("@actions/github"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
- const api_client_1 = require("./api-client");
+ const api = __importStar(require("./api-client"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
- let pluginStub;
- let githubStub;
ava_1.default.beforeEach(() => {
- pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
- githubStub = sinon.stub();
- pluginStub.returns(githubStub);
util.initializeEnvironment(actionsUtil.getActionVersion());
});
(0, ava_1.default)("getApiClient", async (t) => {
+ const pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
+ const githubStub = sinon.stub();
+ pluginStub.returns(githubStub);
sinon.stub(actionsUtil, "getRequiredInput").withArgs("token").returns("xyz");
const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
requiredEnvParamStub
@@ -51,7 +50,7 @@ ava_1.default.beforeEach(() => {
requiredEnvParamStub
.withArgs("GITHUB_API_URL")
.returns("http://api.github.localhost");
- (0, api_client_1.getApiClient)();
+ api.getApiClient();
t.assert(githubStub.calledOnceWithExactly({
auth: "token xyz",
baseUrl: "http://api.github.localhost",
@@ -59,4 +58,65 @@ ava_1.default.beforeEach(() => {
userAgent: `CodeQL-Action/${actionsUtil.getActionVersion()}`,
}));
});
+ function mockGetMetaVersionHeader(versionHeader) {
+ // Passing an auth token is required, so we just use a dummy value
+ const client = github.getOctokit("123");
+ const response = {
+ headers: {
+ "x-github-enterprise-version": versionHeader,
+ },
+ };
+ const spyGetContents = sinon
+ .stub(client.rest.meta, "get")
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+ .resolves(response);
+ sinon.stub(api, "getApiClient").value(() => client);
+ return spyGetContents;
+ }
+ (0, ava_1.default)("getGitHubVersion for Dotcom", async (t) => {
+ const apiDetails = {
+ auth: "",
+ url: "https://github.com",
+ apiURL: "",
+ };
+ sinon.stub(api, "getApiDetails").returns(apiDetails);
+ const v = await api.getGitHubVersionFromApi(github.getOctokit("123"), apiDetails);
+ t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
+ });
+ (0, ava_1.default)("getGitHubVersion for GHES", async (t) => {
+ mockGetMetaVersionHeader("2.0");
+ const v2 = await api.getGitHubVersionFromApi(api.getApiClient(), {
+ auth: "",
+ url: "https://ghe.example.com",
+ apiURL: undefined,
+ });
+ t.deepEqual({ type: util.GitHubVariant.GHES, version: "2.0" }, v2);
+ });
+ (0, ava_1.default)("getGitHubVersion for GHAE", async (t) => {
+ mockGetMetaVersionHeader("GitHub AE");
+ const ghae = await api.getGitHubVersionFromApi(api.getApiClient(), {
+ auth: "",
+ url: "https://example.githubenterprise.com",
+ apiURL: undefined,
+ });
+ t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
+ });
+ (0, ava_1.default)("getGitHubVersion for different domain", async (t) => {
+ mockGetMetaVersionHeader(undefined);
+ const v3 = await api.getGitHubVersionFromApi(api.getApiClient(), {
+ auth: "",
+ url: "https://ghe.example.com",
+ apiURL: undefined,
+ });
+ t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
+ });
+ (0, ava_1.default)("getGitHubVersion for GHE_DOTCOM", async (t) => {
+ mockGetMetaVersionHeader("ghe.com");
+ const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
+ auth: "",
+ url: "https://foo.ghe.com",
+ apiURL: undefined,
+ });
+ t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
+ });
//# sourceMappingURL=api-client.test.js.map
@@ -1 +1 @@
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,6CAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAI,UAA2B,CAAC;AAChC,IAAI,UAA2B,CAAC;AAEhC,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IACtD,UAAU,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;IAC1B,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAC/B,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC7D,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/B,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC7E,MAAM,oBAAoB,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,qBAAqB,CAAC,CAAC;IACrE,oBAAoB;SACjB,QAAQ,CAAC,mBAAmB,CAAC;SAC7B,OAAO,CAAC,yBAAyB,CAAC,CAAC;IACtC,oBAAoB;SACjB,QAAQ,CAAC,gBAAgB,CAAC;SAC1B,OAAO,CAAC,6BAA6B,CAAC,CAAC;IAE1C,IAAA,yBAAY,GAAE,CAAC;IAEf,CAAC,CAAC,MAAM,CACN,UAAU,CAAC,qBAAqB,CAAC;QAC/B,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B;QACtC,GAAG,EAAE,KAAK,CAAC,KAAK,CAAC,GAAG;QACpB,SAAS,EAAE,iBAAiB,WAAW,CAAC,gBAAgB,EAAE,EAAE;KAC7D,CAAC,CACH,CAAC;AACJ,CAAC,CAAC,CAAC"}
{"version":3,"file":"api-client.test.js","sourceRoot":"","sources":["../src/api-client.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAC1C,uEAAyD;AACzD,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,kDAAoC;AACpC,mDAA6C;AAC7C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,UAAU,CAAC,GAAG,EAAE;IACnB,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,gBAAgB,EAAE,CAAC,CAAC;AAC7D,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,cAAc,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/B,MAAM,UAAU,GAAoB,KAAK,CAAC,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAC7E,MAAM,UAAU,GAAoB,KAAK,CAAC,IAAI,EAAE,CAAC;IACjD,UAAU,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IAE/B,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAC7E,MAAM,oBAAoB,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,EAAE,qBAAqB,CAAC,CAAC;IACrE,oBAAoB;SACjB,QAAQ,CAAC,mBAAmB,CAAC;SAC7B,OAAO,CAAC,yBAAyB,CAAC,CAAC;IACtC,oBAAoB;SACjB,QAAQ,CAAC,gBAAgB,CAAC;SAC1B,OAAO,CAAC,6BAA6B,CAAC,CAAC;IAE1C,GAAG,CAAC,YAAY,EAAE,CAAC;IAEnB,CAAC,CAAC,MAAM,CACN,UAAU,CAAC,qBAAqB,CAAC;QAC/B,IAAI,EAAE,WAAW;QACjB,OAAO,EAAE,6BAA6B;QACtC,GAAG,EAAE,KAAK,CAAC,KAAK,CAAC,GAAG;QACpB,SAAS,EAAE,iBAAiB,WAAW,CAAC,gBAAgB,EAAE,EAAE;KAC7D,CAAC,CACH,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,wBAAwB,CAC/B,aAAiC;IAEjC,kEAAkE;IAClE,MAAM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IACxC,MAAM,QAAQ,GAAG;QACf,OAAO,EAAE;YACP,6BAA6B,EAAE,aAAa;SAC7C;KACF,CAAC;IACF,MAAM,cAAc,GAAG,KAAK;SACzB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,KAAK,CAAC;QAC9B,iEAAiE;SAChE,QAAQ,CAAC,QAAe,CAAC,CAAC;IAC7B,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;IACpD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,IAAA,aAAI,EAAC,6BAA6B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9C,MAAM,UAAU,GAAG;QACjB,IAAI,EAAE,EAAE;QACR,GAAG,EAAE,oBAAoB;QACzB,MAAM,EAAE,EAAE;KACX,CAAC;IACF,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,eAAe,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;IACrD,MAAM,CAAC,GAAG,MAAM,GAAG,CAAC,uBAAuB,CACzC,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,EACxB,UAAU,CACX,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,CAAC,CAAC;AACjD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,2BAA2B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5C,wBAAwB,CAAC,KAAK,CAAC,CAAC;IAChC,MAAM,EAAE,GAAG,MAAM,GAAG,CAAC,uBAAuB,CAAC,GAAG,CAAC,YAAY,EAAE,EAAE;QAC/D,IAAI,EAAE,EAAE;QACR,GAAG,EAAE,yBAAyB;QAC9B,MAAM,EAAE,SAAS;KAClB,CAAC,CAAC;IACH,CAAC,CAAC,SAAS,CACT,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,EAAwB,EACvE,EAAE,CACH,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,2BAA2B,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC5C,wBAAwB,CAAC,WAAW,CAAC,CAAC;IACtC,MAAM,IAAI,GAAG,MAAM,GAAG,CAAC,uBAAuB,CAAC,GAAG,CAAC,YAAY,EAAE,EAAE;QACjE,IAAI,EAAE,EAAE;QACR,GAAG,EAAE,sCAAsC;QAC3C,MAAM,EAAE,SAAS;KAClB,CAAC,CAAC;IACH,CAAC,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,EAAE,IAAI,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,uCAAuC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACxD,wBAAwB,CAAC,SAAS,CAAC,CAAC;IACpC,MAAM,EAAE,GAAG,MAAM,GAAG,CAAC,uBAAuB,CAAC,GAAG,CAAC,YAAY,EAAE,EAAE;QAC/D,IAAI,EAAE,EAAE;QACR,GAAG,EAAE,yBAAyB;QAC9B,MAAM,EAAE,SAAS;KAClB,CAAC,CAAC;IACH,CAAC,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,EAAE,EAAE,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,iCAAiC,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAClD,wBAAwB,CAAC,SAAS,CAAC,CAAC;IACpC,MAAM,SAAS,GAAG,MAAM,GAAG,CAAC,uBAAuB,CAAC,GAAG,CAAC,YAAY,EAAE,EAAE;QACtE,IAAI,EAAE,EAAE;QACR,GAAG,EAAE,qBAAqB;QAC1B,MAAM,EAAE,SAAS;KAClB,CAAC,CAAC;IACH,CAAC,CAAC,SAAS,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,UAAU,EAAE,EAAE,SAAS,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC"}
6 lib/autobuild-action.js generated
@@ -35,13 +35,13 @@ const util_1 = require("./util");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)((0, actions_util_1.getActionVersion)());
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
- const statusReportBase = await (0, actions_util_1.createStatusReportBase)("autobuild", status, startedAt, cause?.message, cause?.stack);
+ const statusReportBase = await (0, api_client_1.createStatusReportBase)("autobuild", status, startedAt, cause?.message, cause?.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
- await (0, actions_util_1.sendStatusReport)(statusReport);
+ await (0, api_client_1.sendStatusReport)(statusReport);
}
async function run() {
const startedAt = new Date();
@@ -49,7 +49,7 @@ async function run() {
let currentLanguage = undefined;
let languages = undefined;
try {
- if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
+ if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
return;
}
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAQwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,+CAAuC;AACvC,2CAAuC;AACvC,uCAA6C;AAC7C,iCAIgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;iBAC1D;aACF;SACF;KACF;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KACxE;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,6CAIsB;AACtB,2CAAwE;AACxE,4DAA8C;AAC9C,+CAAuC;AACvC,2CAAuC;AACvC,uCAA6C;AAC7C,iCAIgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;iBAC1D;aACF;SACF;KACF;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KACxE;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
54 lib/codeql.js generated
@@ -23,21 +23,19 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
- exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = exports.CODEQL_VERSION_RESOLVE_ENVIRONMENT = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_EXPORT_CODE_SCANNING_CONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
+ exports.getGeneratedCodeScanningConfigPath = exports.getTrapCachingExtractorConfigArgsForLang = exports.getTrapCachingExtractorConfigArgs = exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_RESOLVE_ENVIRONMENT = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_EXPORT_CODE_SCANNING_CONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const yaml = __importStar(require("js-yaml"));
const actions_util_1 = require("./actions-util");
- const config_utils_1 = require("./config-utils");
const environment_1 = require("./environment");
const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const setupCodeql = __importStar(require("./setup-codeql"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
- const trap_caching_1 = require("./trap-caching");
const util = __importStar(require("./util"));
const util_1 = require("./util");
class CommandInvocationError extends Error {
@@ -99,14 +97,6 @@ exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";
* Versions 2.13.4+ of the CodeQL CLI support the `resolve build-environment` command.
*/
exports.CODEQL_VERSION_RESOLVE_ENVIRONMENT = "2.13.4";
- /**
- * Versions 2.13.4+ of the CodeQL CLI have an associated CodeQL Bundle release that is semantically versioned.
- */
- exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
- /**
- * Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
- */
- exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
/**
* Set up CodeQL CLI access.
*
@@ -248,7 +238,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
extraArgs.push("--begin-tracing");
- extraArgs.push(...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config)));
+ extraArgs.push(...(await getTrapCachingExtractorConfigArgs(config)));
extraArgs.push(`--trace-process-name=${processName}`);
if (
// There's a bug in Lua tracing for Go on Windows in versions earlier than
@@ -324,7 +314,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, [
"database",
"trace-command",
- ...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgsForLang)(config, language)),
+ ...(await getTrapCachingExtractorConfigArgsForLang(config, language)),
...getExtraOptionsFromEnv(["database", "trace-command"]),
databasePath,
"--",
@@ -470,7 +460,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (await features.getValue(feature_flags_1.Feature.NewAnalysisSummaryEnabled, this)) {
codeqlArgs.push("--new-analysis-summary");
}
- else if (await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)) {
+ else if (await util.codeQlVersionAbove(this, feature_flags_1.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)) {
codeqlArgs.push("--no-new-analysis-summary");
}
codeqlArgs.push(databasePath);
@@ -740,10 +730,10 @@ async function runTool(cmd, args = [], opts = {}) {
* @returns the path to the generated user configuration file.
*/
async function generateCodeScanningConfig(codeql, config, features, logger) {
- if (!(await util.useCodeScanningConfigInCli(codeql, features))) {
+ if (!(await (0, feature_flags_1.useCodeScanningConfigInCli)(codeql, features))) {
return;
}
- const codeScanningConfigFile = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
+ const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config);
// make a copy so we can modify it
const augmentedConfig = cloneObject(config.originalUserInput);
// Inject the queries from the input
@@ -814,11 +804,41 @@ function cloneObject(obj) {
* Returns an empty list if a code scanning configuration file was not generated by the CLI.
*/
async function getCodeScanningConfigExportArguments(config, codeql) {
- const codeScanningConfigPath = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
+ const codeScanningConfigPath = getGeneratedCodeScanningConfigPath(config);
if (fs.existsSync(codeScanningConfigPath) &&
(await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_EXPORT_CODE_SCANNING_CONFIG))) {
return ["--sarif-codescanning-config", codeScanningConfigPath];
}
return [];
}
+ // This constant sets the size of each TRAP cache in megabytes.
+ const TRAP_CACHE_SIZE_MB = 1024;
+ async function getTrapCachingExtractorConfigArgs(config) {
+ const result = [];
+ for (const language of config.languages)
+ result.push(await getTrapCachingExtractorConfigArgsForLang(config, language));
+ return result.flat();
+ }
+ exports.getTrapCachingExtractorConfigArgs = getTrapCachingExtractorConfigArgs;
+ async function getTrapCachingExtractorConfigArgsForLang(config, language) {
+ const cacheDir = config.trapCaches[language];
+ if (cacheDir === undefined)
+ return [];
+ const write = await (0, actions_util_1.isAnalyzingDefaultBranch)();
+ return [
+ `-O=${language}.trap.cache.dir=${cacheDir}`,
+ `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
+ `-O=${language}.trap.cache.write=${write}`,
+ ];
+ }
+ exports.getTrapCachingExtractorConfigArgsForLang = getTrapCachingExtractorConfigArgsForLang;
+ /**
+ * Get the path to the code scanning configuration generated by the CLI.
+ *
+ * This will not exist if the configuration is being parsed in the Action.
+ */
+ function getGeneratedCodeScanningConfigPath(config) {
+ return path.resolve(config.tempDir, "user-config.yaml");
+ }
+ exports.getGeneratedCodeScanningConfigPath = getGeneratedCodeScanningConfigPath;
//# sourceMappingURL=codeql.js.map
File diff suppressed because one or more lines are too long
20 lib/codeql.test.js generated
@@ -41,9 +41,9 @@ const actionsUtil = __importStar(require("./actions-util"));
const codeql = __importStar(require("./codeql"));
const defaults = __importStar(require("./defaults.json"));
const feature_flags_1 = require("./feature-flags");
- const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
+ const setup_codeql_1 = require("./setup-codeql");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
const util_1 = require("./util");
@@ -125,7 +125,7 @@ function mockApiDetails(apiDetails) {
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
@@ -145,7 +145,7 @@ function mockApiDetails(apiDetails) {
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
});
});
@@ -180,7 +180,7 @@ for (const { cliVersion, expectedToolcacheVersion, } of EXPLICITLY_REQUESTED_BUN
t.assert(releaseApiMock.isDone(), "Releases API should have been called");
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, cliVersion);
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
});
});
@@ -202,7 +202,7 @@ for (const toolcacheVersion of [
sinon.stub(toolcache, "findAllVersions").returns([toolcacheVersion]);
const result = await codeql.setupCodeQL(undefined, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.is(result.toolsVersion, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
- t.is(result.toolsSource, init_1.ToolsSource.Toolcache);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Toolcache);
t.is(result.toolsDownloadDurationMs, undefined);
});
});
@@ -221,7 +221,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
tagName: defaults.bundleVersion,
}, (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
- t.is(result.toolsSource, init_1.ToolsSource.Toolcache);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Toolcache);
t.is(result.toolsDownloadDurationMs, undefined);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
@@ -243,7 +243,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
tagName: defaults.bundleVersion,
}, (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
@@ -263,7 +263,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
});
const result = await codeql.setupCodeQL("latest", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
@@ -306,7 +306,7 @@ for (const isBundleVersionInUrl of [true, false]) {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
}, (0, logging_1.getRunnerLogger)(true), false);
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
@@ -329,7 +329,7 @@ for (const isBundleVersionInUrl of [true, false]) {
});
const result = await codeql.setupCodeQL("https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.is(result.toolsVersion, "0.0.0-20230203");
- t.is(result.toolsSource, init_1.ToolsSource.Download);
+ t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.true(Number.isInteger(result.toolsDownloadDurationMs));
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
File diff suppressed because one or more lines are too long
63 lib/config-utils.js generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
- exports.getGeneratedCodeScanningConfigPath = exports.wrapEnvironment = exports.generateRegistries = exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
+ exports.wrapEnvironment = exports.generateRegistries = exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.getMlPoweredJsQueriesStatus = exports.parsePacks = exports.validatePackSpecification = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const perf_hooks_1 = require("perf_hooks");
@@ -253,7 +253,7 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
// Otherwise, must be a reference to another repo.
// If config parsing is handled in CLI, then this repo will be downloaded
// later by the CLI.
- if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
+ if (!(await (0, feature_flags_1.useCodeScanningConfigInCli)(codeQL, features))) {
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
@@ -870,12 +870,8 @@ function parsePacksSpecification(packStr, configFile) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.parsePacksSpecification = parsePacksSpecification;
|
exports.parsePacksSpecification = parsePacksSpecification;
|
||||||
function prettyPrintPack(pack) {
|
|
||||||
return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
|
|
||||||
}
|
|
||||||
exports.prettyPrintPack = prettyPrintPack;
|
|
||||||
function validatePackSpecification(pack, configFile) {
|
function validatePackSpecification(pack, configFile) {
|
||||||
return prettyPrintPack(parsePacksSpecification(pack, configFile));
|
return (0, util_1.prettyPrintPack)(parsePacksSpecification(pack, configFile));
|
||||||
}
|
}
|
||||||
exports.validatePackSpecification = validatePackSpecification;
|
exports.validatePackSpecification = validatePackSpecification;
|
||||||
// exported for testing
|
// exported for testing
|
||||||
@@ -919,6 +915,46 @@ function combinePacks(packs1, packs2) {
|
|||||||
}
|
}
|
||||||
return packs;
|
return packs;
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* Get information about ML-powered JS queries to populate status reports with.
|
||||||
|
*
|
||||||
|
* This will be:
|
||||||
|
*
|
||||||
|
* - The version string if the analysis is using a single version of the ML-powered query pack.
|
||||||
|
* - "latest" if the version string of the ML-powered query pack is undefined. This is unlikely to
|
||||||
|
* occur in practice (see comment below).
|
||||||
|
* - "false" if the analysis won't run any ML-powered JS queries.
|
||||||
|
* - "other" in all other cases.
|
||||||
|
*
|
||||||
|
* Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
|
||||||
|
* errors with ML-powered queries turned on and off. We also want to be able to compare minor
|
||||||
|
* version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
|
||||||
|
* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
|
||||||
|
* version strings.
|
||||||
|
*
|
||||||
|
* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
|
||||||
|
* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
|
||||||
|
* explanation as to why this is.
|
||||||
|
*/
|
||||||
|
function getMlPoweredJsQueriesStatus(config) {
|
||||||
|
const mlPoweredJsQueryPacks = (config.packs.javascript || [])
|
||||||
|
.map((p) => parsePacksSpecification(p))
|
||||||
|
.filter((pack) => pack.name === util_1.ML_POWERED_JS_QUERIES_PACK_NAME && !pack.path);
|
||||||
|
switch (mlPoweredJsQueryPacks.length) {
|
||||||
|
case 1:
|
||||||
|
// We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
|
||||||
|
// otherwise we won't be able to make changes to the pack unless those changes are compatible
|
||||||
|
// with each version of the CodeQL Action. Therefore in practice we should only hit the
|
||||||
|
// `latest` case here when customers have explicitly added the ML-powered query pack to their
|
||||||
|
// CodeQL config.
|
||||||
|
return mlPoweredJsQueryPacks[0].version || "latest";
|
||||||
|
case 0:
|
||||||
|
return "false";
|
||||||
|
default:
|
||||||
|
return "other";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
|
||||||
function dbLocationOrDefault(dbLocation, tempDir) {
|
function dbLocationOrDefault(dbLocation, tempDir) {
|
||||||
return dbLocation || path.resolve(tempDir, "codeql_databases");
|
return dbLocation || path.resolve(tempDir, "codeql_databases");
|
||||||
}
|
}
|
||||||
@@ -950,8 +986,8 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
|||||||
// When using the codescanning config in the CLI, pack downloads
|
// When using the codescanning config in the CLI, pack downloads
|
||||||
// happen in the CLI during the `database init` command, so no need
|
// happen in the CLI during the `database init` command, so no need
|
||||||
// to download them here.
|
// to download them here.
|
||||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, features, logger);
|
await (0, feature_flags_1.logCodeScanningConfigInCli)(codeQL, features, logger);
|
||||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
if (!(await (0, feature_flags_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
||||||
// The list of queries should not be empty for any language. If it is then
|
// The list of queries should not be empty for any language. If it is then
|
||||||
// it is a user configuration error.
|
// it is a user configuration error.
|
||||||
// This check occurs in the CLI when it parses the config file.
|
// This check occurs in the CLI when it parses the config file.
|
||||||
@@ -1182,13 +1218,4 @@ async function wrapEnvironment(env, operation) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.wrapEnvironment = wrapEnvironment;
|
exports.wrapEnvironment = wrapEnvironment;
|
||||||
/**
|
|
||||||
* Get the path to the code scanning configuration generated by the CLI.
|
|
||||||
*
|
|
||||||
* This will not exist if the configuration is being parsed in the Action.
|
|
||||||
*/
|
|
||||||
function getGeneratedCodeScanningConfigPath(config) {
|
|
||||||
return path.resolve(config.tempDir, "user-config.yaml");
|
|
||||||
}
|
|
||||||
exports.getGeneratedCodeScanningConfigPath = getGeneratedCodeScanningConfigPath;
|
|
||||||
//# sourceMappingURL=config-utils.js.map
|
//# sourceMappingURL=config-utils.js.map
|
||||||
File diff suppressed because one or more lines are too long
62
lib/config-utils.test.js
generated
@@ -990,7 +990,7 @@ const packSpecPrettyPrintingMacro = ava_1.default.macro({
 exec: (t, packStr, packObj) => {
 const parsed = configUtils.parsePacksSpecification(packStr);
 t.deepEqual(parsed, packObj, "parsed pack spec is correct");
-const stringified = configUtils.prettyPrintPack(packObj);
+const stringified = (0, util_1.prettyPrintPack)(packObj);
 t.deepEqual(stringified, packStr.trim(), "pretty-printed pack spec is correct");
 t.deepEqual(configUtils.validatePackSpecification(packStr), packStr.trim(), "pack spec is valid");
 },
@@ -1456,4 +1456,64 @@ const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
 t.deepEqual(mockRequest.called, args.expectedApiCall);
 });
 });
+const ML_POWERED_JS_STATUS_TESTS = [
+// If no packs are loaded, status is false.
+[[], "false"],
+// If another pack is loaded but not the ML-powered query pack, status is false.
+[["some-other/pack"], "false"],
+// If the ML-powered query pack is loaded with a specific version, status is that version.
+[[`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
+// If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
+// status is the version of the ML-powered query pack.
+[["some-other/pack", `${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
+// If the ML-powered query pack is loaded without a version, the status is "latest".
+[[util_1.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
+// If the ML-powered query pack is loaded with two different versions, the status is "other".
+[
+[
+`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.1`,
+`${util_1.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.2`,
+],
+"other",
+],
+// If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
+// the status is "latest".
+[["some-other/pack", util_1.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
+];
+for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
+const packDescriptions = `[${packs
+.map((pack) => JSON.stringify(pack))
+.join(", ")}]`;
+(0, ava_1.default)(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
+return (0, util_1.withTmpDir)(async (tmpDir) => {
+const config = {
+languages: [],
+queries: {},
+paths: [],
+pathsIgnore: [],
+originalUserInput: {},
+tempDir: tmpDir,
+codeQLCmd: "",
+gitHubVersion: {
+type: util_1.GitHubVariant.DOTCOM,
+},
+dbLocation: "",
+packs: {
+javascript: packs,
+},
+debugMode: false,
+debugArtifactName: util_1.DEFAULT_DEBUG_ARTIFACT_NAME,
+debugDatabaseName: util_1.DEFAULT_DEBUG_DATABASE_NAME,
+augmentationProperties: {
+injectedMlQueries: false,
+packsInputCombines: false,
+queriesInputCombines: false,
+},
+trapCaches: {},
+trapCacheDownloadTime: 0,
+};
+t.is(configUtils.getMlPoweredJsQueriesStatus(config), expectedStatus);
+});
+});
+}
 //# sourceMappingURL=config-utils.test.js.map
File diff suppressed because one or more lines are too long
2
lib/error-matcher.js
generated
@@ -10,7 +10,7 @@ exports.namedMatchersForTesting = {
 exitCode: 32,
 outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
 message: "No code found during the build. Please see:\n" +
-"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
+"https://gh.io/troubleshooting-code-scanning/no-source-code-seen-during-build",
 },
 fatalError: {
 outputRegex: new RegExp("A fatal error occurred"),
@@ -1 +1 @@
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;IACD,UAAU,EAAE;QACV,WAAW,EAAE,IAAI,MAAM,CAAC,wBAAwB,CAAC;QACjD,OAAO,EAAE,yBAAyB;KACnC;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,8EAA8E;KACjF;IACD,UAAU,EAAE;QACV,WAAW,EAAE,IAAI,MAAM,CAAC,wBAAwB,CAAC;QACjD,OAAO,EAAE,yBAAyB;KACnC;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
32
lib/feature-flags.js
generated
@@ -23,16 +23,23 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = void 0;
+exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const semver = __importStar(require("semver"));
 const api_client_1 = require("./api-client");
-const codeql_1 = require("./codeql");
 const defaults = __importStar(require("./defaults.json"));
 const util = __importStar(require("./util"));
 const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
 const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
+/**
+ * Versions 2.13.4+ of the CodeQL CLI have an associated CodeQL Bundle release that is semantically versioned.
+ */
+exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
+/**
+ * Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
+ */
+exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
 /**
  * Feature enablement as returned by the GitHub API endpoint.
  *
@@ -73,7 +80,7 @@ exports.featureConfig = {
 },
 [Feature.NewAnalysisSummaryEnabled]: {
 envVar: "CODEQL_ACTION_NEW_ANALYSIS_SUMMARY",
-minimumVersion: codeql_1.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
+minimumVersion: exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
 defaultValue: false,
 },
 [Feature.QaTelemetryEnabled]: {
@@ -205,7 +212,7 @@ class GitHubFeatureFlags {
 .map(([f, isEnabled]) => isEnabled ? this.getCliVersionFromFeatureFlag(f) : undefined)
 .filter((f) => f !== undefined &&
 // Only consider versions that have semantically versioned bundles.
-semver.gte(f, codeql_1.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED))
+semver.gte(f, exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED))
 .map((f) => f);
 if (enabledFeatureFlagCliVersions.length === 0) {
 // We expect at least one default CLI version to be enabled on Dotcom at any time. However if
@@ -330,4 +337,21 @@ class GitHubFeatureFlags {
 }
 }
 }
+/**
+ * @returns Whether the Action should generate a code scanning config file
+ * that gets passed to the CLI.
+ */
+async function useCodeScanningConfigInCli(codeql, features) {
+return await features.getValue(Feature.CliConfigFileEnabled, codeql);
+}
+exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
+async function logCodeScanningConfigInCli(codeql, features, logger) {
+if (await useCodeScanningConfigInCli(codeql, features)) {
+logger.info("Code Scanning configuration file being processed in the codeql CLI.");
+}
+else {
+logger.info("Code Scanning configuration file being processed in the codeql-action.");
+}
+}
+exports.logCodeScanningConfigInCli = logCodeScanningConfigInCli;
 //# sourceMappingURL=feature-flags.js.map
File diff suppressed because one or more lines are too long
6
lib/init-action-post.js
generated
@@ -51,15 +51,15 @@ async function runWrapper() {
 catch (unwrappedError) {
 const error = (0, util_1.wrapError)(unwrappedError);
 core.setFailed(error.message);
-await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init-post", (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
+await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init-post", (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
 return;
 }
-const statusReportBase = await (0, actions_util_1.createStatusReportBase)("init-post", "success", startedAt);
+const statusReportBase = await (0, api_client_1.createStatusReportBase)("init-post", "success", startedAt);
 const statusReport = {
 ...statusReportBase,
 ...uploadFailedSarifResult,
 };
-await (0, actions_util_1.sendStatusReport)(statusReport);
+await (0, api_client_1.sendStatusReport)(statusReport);
 }
 void runWrapper();
 //# sourceMappingURL=init-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAIgB;AAMhB,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,IAAA,+BAAgB,EACpB,MAAM,IAAA,qCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAKwB;AACxB,6CAIsB;AACtB,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAIgB;AAMhB,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
18
lib/init-action.js
generated
@@ -28,23 +28,25 @@ const core = __importStar(require("@actions/core"));
 const uuid_1 = require("uuid");
 const actions_util_1 = require("./actions-util");
 const api_client_1 = require("./api-client");
+const config_utils_1 = require("./config-utils");
 const environment_1 = require("./environment");
 const feature_flags_1 = require("./feature-flags");
 const init_1 = require("./init");
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const repository_1 = require("./repository");
+const setup_codeql_1 = require("./setup-codeql");
 const trap_caching_1 = require("./trap-caching");
 const util_1 = require("./util");
 const workflow_1 = require("./workflow");
 async function sendCompletedStatusReport(startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger, error) {
-const statusReportBase = await (0, actions_util_1.createStatusReportBase)("init", (0, actions_util_1.getActionsStatus)(error), startedAt, error?.message, error?.stack);
+const statusReportBase = await (0, api_client_1.createStatusReportBase)("init", (0, actions_util_1.getActionsStatus)(error), startedAt, error?.message, error?.stack);
 const workflowLanguages = (0, actions_util_1.getOptionalInput)("languages");
 const initStatusReport = {
 ...statusReportBase,
 tools_input: (0, actions_util_1.getOptionalInput)("tools") || "",
 tools_resolved_version: toolsVersion,
-tools_source: toolsSource || init_1.ToolsSource.Unknown,
+tools_source: toolsSource || setup_codeql_1.ToolsSource.Unknown,
 workflow_languages: workflowLanguages || "",
 };
 const initToolsDownloadFields = {};
@@ -78,7 +80,7 @@ async function sendCompletedStatusReport(startedAt, config, toolsDownloadDuratio
 ...initStatusReport,
 disable_default_queries: disableDefaultQueries,
 languages,
-ml_powered_javascript_queries: (0, util_1.getMlPoweredJsQueriesStatus)(config),
+ml_powered_javascript_queries: (0, config_utils_1.getMlPoweredJsQueriesStatus)(config),
 paths,
 paths_ignore: pathsIgnore,
 queries: queries.join(","),
@@ -86,13 +88,13 @@ async function sendCompletedStatusReport(startedAt, config, toolsDownloadDuratio
 trap_cache_download_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
 trap_cache_download_duration_ms: Math.round(config.trapCacheDownloadTime),
 };
-await (0, actions_util_1.sendStatusReport)({
+await (0, api_client_1.sendStatusReport)({
 ...initWithConfigStatusReport,
 ...initToolsDownloadFields,
 });
 }
 else {
-await (0, actions_util_1.sendStatusReport)({ ...initStatusReport, ...initToolsDownloadFields });
+await (0, api_client_1.sendStatusReport)({ ...initStatusReport, ...initToolsDownloadFields });
 }
 }
 async function run() {
@@ -119,7 +121,7 @@ async function run() {
 core.exportVariable(environment_1.EnvVar.JOB_RUN_UUID, (0, uuid_1.v4)());
 try {
 const workflowErrors = await (0, workflow_1.validateWorkflow)(logger);
-if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
+if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
 return;
 }
 const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(gitHubVersion.type);
@@ -154,7 +156,7 @@ async function run() {
 catch (unwrappedError) {
 const error = (0, util_1.wrapError)(unwrappedError);
 core.setFailed(error.message);
-await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", error instanceof util_1.UserError ? "user-error" : "aborted", startedAt, error.message, error.stack));
+await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init", error instanceof util_1.UserError ? "user-error" : "aborted", startedAt, error.message, error.stack));
 return;
 }
 try {
@@ -170,7 +172,7 @@ async function run() {
 // options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
 // for details.
 core.exportVariable("CODEQL_RAM", process.env["CODEQL_RAM"] ||
-(await (0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram"), features)).toString());
+(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram"), await features.getValue(feature_flags_1.Feature.ScalingReservedRamEnabled)).toString());
 core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
 // Disable Kotlin extractor if feature flag set
 if (await features.getValue(feature_flags_1.Feature.DisableKotlinAnalysisEnabled)) {
File diff suppressed because one or more lines are too long
12
lib/init.js
generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.installPythonDeps = exports.runInit = exports.initConfig = exports.initCodeQL = exports.ToolsSource = void 0;
+exports.installPythonDeps = exports.runInit = exports.initConfig = exports.initCodeQL = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
@@ -31,15 +31,9 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
 const analysisPaths = __importStar(require("./analysis-paths"));
 const codeql_1 = require("./codeql");
 const configUtils = __importStar(require("./config-utils"));
+const feature_flags_1 = require("./feature-flags");
 const tracer_config_1 = require("./tracer-config");
 const util = __importStar(require("./util"));
-var ToolsSource;
-(function (ToolsSource) {
-ToolsSource["Unknown"] = "UNKNOWN";
-ToolsSource["Local"] = "LOCAL";
-ToolsSource["Toolcache"] = "TOOLCACHE";
-ToolsSource["Download"] = "DOWNLOAD";
-})(ToolsSource || (exports.ToolsSource = ToolsSource = {}));
 async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
 logger.startGroup("Setup CodeQL tools");
 const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, true);
@@ -65,7 +59,7 @@ async function runInit(codeql, config, sourceRoot, processName, registriesInput,
 // before the `pack download` command was invoked. It is not required for the init command.
 let registriesAuthTokens;
 let qlconfigFile;
-if (await util.useCodeScanningConfigInCli(codeql, features)) {
+if (await (0, feature_flags_1.useCodeScanningConfigInCli)(codeql, features)) {
 ({ registriesAuthTokens, qlconfigFile } =
 await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
 }
@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAE/B,IAAY,WAKX;AALD,WAAY,WAAW;IACrB,kCAAmB,CAAA;IACnB,8BAAe,CAAA;IACf,sCAAuB,CAAA;IACvB,oCAAqB,CAAA;AACvB,CAAC,EALW,WAAW,2BAAX,WAAW,QAKtB;AAEM,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,MAAc;IAOd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,GAClE,MAAM,IAAA,oBAAW,EACf,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACJ,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;AACxE,CAAC;AA3BD,gCA2BC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,WAA+B,EAC/B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,QAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,WAAW,EACX,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,QAAQ,EACR,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9CD,gCA8CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,QAA2B,EAC3B,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACrD,IAAI;QACF,wFAAwF;QACxF,qBAAqB;QACrB,8FAA8F;QAC9F,2FAA2F;QAC3F,IAAI,oBAAwC,CAAC;QAC7C,IAAI,YAAgC,CAAC;QACrC,IAAI,MAAM,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;YAC3D,CAAC,EAAE,oBAAoB,EAAE,YAAY,EAAE;gBACrC,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,EACN,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC,CAAC;SACN;QACD,MAAM,WAAW,CAAC,eAAe,CAC/B;YACE,YAAY,EAAE,UAAU,CAAC,IAAI;YAC7B,sBAAsB,EAAE,oBAAoB;SAC7C;QAED,0BAA0B;QAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,QAAQ,EACR,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,CAAC,CAAC;AAC/C,CAAC;AAhDD,0BAgDC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,8BAA8B,CAAC;QACnD,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,uCAAuC,CAAC,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,wCAAwC,CAAC;QAC7D,gEAAgE;QAChE,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,qBAAqB,CAAC,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBAC
J,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAC9C,mDAIyB;AAIzB,mDAAwE;AACxE,6CAA+B;AAExB,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,MAAc;IAOd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,GAClE,MAAM,IAAA,oBAAW,EACf,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACJ,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;AACxE,CAAC;AA3BD,gCA2BC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,WAA+B,EAC/B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,QAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,WAAW,EACX,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,QAAQ,EACR,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9CD,gCA8CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,QAA2B,EAC3B,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACrD,IAAI;QACF,wFAAwF;QACxF,qBAAqB;QACrB,8FAA8F;QAC9F,2FAA2F;QAC3F,IAAI,oBAAwC,CAAC;QAC7C,IAAI,YAAgC,CAAC;QACrC,IAAI,MAAM,IAAA,0CAA0B,EAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;YACtD,CAAC,EAAE,oBAAoB,EAAE,YAAY,EAAE;gBACrC,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,EACN,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC,CAAC;SACN;QACD,MAAM,WAAW,CAAC,eAAe,CAC/B;YACE,YAAY,EAAE,UAAU,CAAC,IAAI;YAC7B,sBAAsB,EAAE,oBAAoB;SAC7C;QAED,0BAA0B;QAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,QAAQ,EACR,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,CAAC,CAAC;AAC/C,CAAC;AAhDD,0BAgDC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,8BAA8B,CAAC;QACnD,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,uCAAuC,CAAC,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,wCAAwC,CAAC;QAC7D,gEAAgE;QAChE,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,qBAAqB,CAAC,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;
aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
6
lib/resolve-environment-action.js
generated
@@ -39,7 +39,7 @@ async function run() {
 const logger = (0, logging_1.getActionsLogger)();
 const language = (0, languages_1.resolveAlias)((0, actions_util_1.getRequiredInput)("language"));
 try {
-if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)(ACTION_NAME, "starting", startedAt)))) {
+if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, "starting", startedAt)))) {
 return;
 }
 const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
@@ -63,11 +63,11 @@ async function run() {
 else {
 // For any other error types, something has more seriously gone wrong and we fail.
 core.setFailed(`Failed to resolve a build environment suitable for automatically building your code. ${error.message}`);
-await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)(ACTION_NAME, (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
+await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
 }
 return;
 }
-await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)(ACTION_NAME, "success", startedAt));
+await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, "success", startedAt));
 }
 async function runWrapper() {
 try {
@@ -1 +1 @@
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAAgD;AAChD,qCAAkD;AAClD,4DAA8C;AAC9C,2CAAqD;AACrD,uCAA6C;AAC7C,+DAAmE;AACnE,iCAA+E;AAE/E,MAAM,WAAW,GAAG,qBAAqB,CAAC;AAC1C,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,QAAQ,GAAa,IAAA,wBAAY,EAAC,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAAC,CAAC;IAEtE,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,QAAQ,CACT,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;KACjD;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,+BAAsB,EAAE;YAC3C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;SACH;aAAM;YACL,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,IAAA,+BAAgB,EACpB,MAAM,IAAA,qCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;SACH;QAED,OAAO;KACR;IAED,MAAM,IAAA,+BAAgB,EACpB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,SAAS,EAAE,SAAS,CAAC,CAChE,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,GAAG,WAAW,mBAAmB,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC7E;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAIsB;AACtB,qCAAkD;AAClD,4DAA8C;AAC9C,2CAAqD;AACrD,uCAA6C;AAC7C,+DAAmE;AACnE,iCAA+E;AAE/E,MAAM,WAAW,GAAG,qBAAqB,CAAC;AAC1C,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,QAAQ,GAAa,IAAA,wBAAY,EAAC,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAAC,CAAC;IAEtE,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,QAAQ,CACT,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;KACjD;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,+BAAsB,EAAE;YAC3C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;SACH;aAAM;YACL,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;SACH;QAED,OAAO;KACR;IAED,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,SAAS,EAAE,SAAS,CAAC,CAChE,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,GAAG,WAAW,mBAAmB,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC7E;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
16
lib/setup-codeql.js
generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.tryGetFallbackToolcacheVersion = exports.getCodeQLSource = exports.convertToSemVer = exports.tryGetBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
+exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.tryGetFallbackToolcacheVersion = exports.getCodeQLSource = exports.convertToSemVer = exports.tryGetBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.ToolsSource = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const perf_hooks_1 = require("perf_hooks");
@@ -40,9 +40,15 @@ const api = __importStar(require("./api-client"));
 // creation scripts. Ensure that any changes to the format of this file are compatible with both of
 // these dependents.
 const defaults = __importStar(require("./defaults.json"));
-const init_1 = require("./init");
 const util = __importStar(require("./util"));
 const util_1 = require("./util");
+var ToolsSource;
+(function (ToolsSource) {
+ToolsSource["Unknown"] = "UNKNOWN";
+ToolsSource["Local"] = "LOCAL";
+ToolsSource["Toolcache"] = "TOOLCACHE";
+ToolsSource["Download"] = "DOWNLOAD";
+})(ToolsSource || (exports.ToolsSource = ToolsSource = {}));
 exports.CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
 function getCodeQLBundleName() {
 let platform;
@@ -483,19 +489,19 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
 switch (source.sourceType) {
 case "local":
 codeqlFolder = await toolcache.extractTar(source.codeqlTarPath);
-toolsSource = init_1.ToolsSource.Local;
+toolsSource = ToolsSource.Local;
 break;
 case "toolcache":
 codeqlFolder = source.codeqlFolder;
 logger.debug(`CodeQL found in cache ${codeqlFolder}`);
-toolsSource = init_1.ToolsSource.Toolcache;
+toolsSource = ToolsSource.Toolcache;
 break;
 case "download": {
 const result = await downloadCodeQL(source.codeqlURL, source.bundleVersion, source.cliVersion, apiDetails, variant, tempDir, logger);
 toolsVersion = result.toolsVersion;
 codeqlFolder = result.codeqlFolder;
 toolsDownloadDurationMs = result.toolsDownloadDurationMs;
-toolsSource = init_1.ToolsSource.Download;
+toolsSource = ToolsSource.Download;
 break;
 }
 default:
File diff suppressed because one or more lines are too long
23
lib/trap-caching.js
generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
 return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getTotalCacheSize = exports.getLanguagesSupportingCaching = exports.uploadTrapCaches = exports.downloadTrapCaches = exports.getTrapCachingExtractorConfigArgsForLang = exports.getTrapCachingExtractorConfigArgs = void 0;
+exports.getTotalCacheSize = exports.getLanguagesSupportingCaching = exports.uploadTrapCaches = exports.downloadTrapCaches = void 0;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const cache = __importStar(require("@actions/cache"));
@@ -36,8 +36,6 @@ const util_1 = require("./util");
 // this for CLI/extractor changes, since the CLI version also
 // goes into the cache key.
 const CACHE_VERSION = 1;
-// This constant sets the size of each TRAP cache in megabytes.
-const CACHE_SIZE_MB = 1024;
 // This constant sets the minimum size in megabytes of a TRAP
 // cache for us to consider it worth uploading.
 const MINIMUM_CACHE_MB_TO_UPLOAD = 10;
@@ -46,25 +44,6 @@ const MINIMUM_CACHE_MB_TO_UPLOAD = 10;
 // this timeout is per operation, so will be run as many
 // times as there are languages with TRAP caching enabled.
 const MAX_CACHE_OPERATION_MS = 120000; // Two minutes
-async function getTrapCachingExtractorConfigArgs(config) {
-const result = [];
-for (const language of config.languages)
-result.push(await getTrapCachingExtractorConfigArgsForLang(config, language));
-return result.flat();
-}
-exports.getTrapCachingExtractorConfigArgs = getTrapCachingExtractorConfigArgs;
-async function getTrapCachingExtractorConfigArgsForLang(config, language) {
-const cacheDir = config.trapCaches[language];
-if (cacheDir === undefined)
-return [];
-const write = await actionsUtil.isAnalyzingDefaultBranch();
-return [
-`-O=${language}.trap.cache.dir=${cacheDir}`,
-`-O=${language}.trap.cache.bound=${CACHE_SIZE_MB}`,
-`-O=${language}.trap.cache.write=${write}`,
-];
-}
-exports.getTrapCachingExtractorConfigArgsForLang = getTrapCachingExtractorConfigArgsForLang;
 /**
  * Download TRAP caches from the Actions cache.
  * @param codeql The CodeQL instance to use.

File diff suppressed because one or more lines are too long

lib/trap-caching.test.js (generated): 4 changed lines

@@ -135,7 +135,7 @@ function getTestConfigWithTempDir(tmpDir) {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfigWithTempDir(tmpDir);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
-const result = await (0, trap_caching_1.getTrapCachingExtractorConfigArgsForLang)(config, languages_1.Language.javascript);
+const result = await (0, codeql_1.getTrapCachingExtractorConfigArgsForLang)(config, languages_1.Language.javascript);
t.deepEqual(result, [
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
"-O=javascript.trap.cache.bound=1024",
@@ -147,7 +147,7 @@ function getTestConfigWithTempDir(tmpDir) {
await util.withTmpDir(async (tmpDir) => {
const config = getTestConfigWithTempDir(tmpDir);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(false);
-const result = await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config);
+const result = await (0, codeql_1.getTrapCachingExtractorConfigArgs)(config);
t.deepEqual(result, [
`-O=javascript.trap.cache.dir=${path.resolve(tmpDir, "jsCache")}`,
"-O=javascript.trap.cache.bound=1024",

File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated): 5 changed lines

@@ -41,7 +41,6 @@ const fingerprints = __importStar(require("./fingerprints"));
const repository_1 = require("./repository");
const util = __importStar(require("./util"));
const util_1 = require("./util");
-const workflow = __importStar(require("./workflow"));
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles) {
@@ -87,7 +86,7 @@ function getAutomationID(category, analysis_key, environment) {
}
return automationID;
}
-return actionsUtil.computeAutomationID(analysis_key, environment);
+return api.computeAutomationID(analysis_key, environment);
}
// Upload the given payload.
// If the request fails then this will retry a small number of times.
@@ -133,7 +132,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
-return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), workflow.getWorkflowRunAttempt(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
+return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await api.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getWorkflowRunAttempt(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
}
exports.uploadFromActions = uploadFromActions;
function getSarifFilePaths(sarifPath) {

File diff suppressed because one or more lines are too long

lib/upload-sarif-action.js (generated): 9 changed lines

@@ -26,22 +26,23 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const actions_util_1 = require("./actions-util");
+const api_client_1 = require("./api-client");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
async function sendSuccessStatusReport(startedAt, uploadStats) {
-const statusReportBase = await actionsUtil.createStatusReportBase("upload-sarif", "success", startedAt);
+const statusReportBase = await (0, api_client_1.createStatusReportBase)("upload-sarif", "success", startedAt);
const statusReport = {
...statusReportBase,
...uploadStats,
};
-await actionsUtil.sendStatusReport(statusReport);
+await (0, api_client_1.sendStatusReport)(statusReport);
}
async function run() {
const startedAt = new Date();
(0, util_1.initializeEnvironment)((0, actions_util_1.getActionVersion)());
-if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", "starting", startedAt)))) {
+if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("upload-sarif", "starting", startedAt)))) {
return;
}
try {
@@ -61,7 +62,7 @@ async function run() {
const message = error.message;
core.setFailed(message);
console.log(error);
-await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("upload-sarif", actionsUtil.getActionsStatus(error), startedAt, message, error.stack));
+await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("upload-sarif", actionsUtil.getActionsStatus(error), startedAt, message, error.stack));
return;
}
}

lib/upload-sarif-action.js.map (generated): 1 changed line (the old mapping line is shown first, the new mapping line after it)

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAkD;AAClD,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAC1C,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACjE,CAAC;KACH;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAkD;AAClD,6CAAwE;AACxE,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAC1C,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,CACpE,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACjE,CAAC;KACH;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/util.js (generated): 159 changed lines

@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.wrapError = exports.fixInvalidNotificationsInFile = exports.fixInvalidNotifications = exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.initializeEnvironment = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
+exports.prettyPrintPack = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.wrapError = exports.fixInvalidNotificationsInFile = exports.fixInvalidNotifications = exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.isInTestMode = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.initializeEnvironment = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.getMemoryFlagValueForPlatform = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -35,11 +35,8 @@ const core = __importStar(require("@actions/core"));
const del_1 = __importDefault(require("del"));
const get_folder_size_1 = __importDefault(require("get-folder-size"));
const semver = __importStar(require("semver"));
-const api_client_1 = require("./api-client");
const apiCompatibility = __importStar(require("./api-compatibility.json"));
-const config_utils_1 = require("./config-utils");
const environment_1 = require("./environment");
-const feature_flags_1 = require("./feature-flags");
/**
* Specifies bundle versions that are known to be broken
* and will not be used if found in the toolcache.
@@ -108,13 +105,13 @@ exports.withTmpDir = withTmpDir;
* from committing too much of the available memory to CodeQL.
* @returns number
*/
-async function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, features) {
+function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform, isScalingReservedRamEnabled) {
// Windows needs more memory for OS processes.
-const fixedAmount = 1024 * (process.platform === "win32" ? 1.5 : 1);
+const fixedAmount = 1024 * (platform === "win32" ? 1.5 : 1);
-if (await features.getValue(feature_flags_1.Feature.ScalingReservedRamEnabled)) {
+if (isScalingReservedRamEnabled) {
-// Reserve an additional 2% of the total memory, since the amount used by
+// Reserve an additional 2.5% of the amount of memory above 8 GB, since the amount used by
// the kernel for page tables scales with the size of physical memory.
-const scaledAmount = 0.02 * totalMemoryMegaBytes;
+const scaledAmount = 0.025 * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
return fixedAmount + scaledAmount;
}
else {
@@ -128,7 +125,7 @@ async function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, features)
*
* @returns {number} the amount of RAM to use, in megabytes
*/
-async function getMemoryFlagValue(userInput, features) {
+function getMemoryFlagValueForPlatform(userInput, totalMemoryBytes, platform, isScalingReservedRamEnabled) {
let memoryToUseMegaBytes;
if (userInput) {
memoryToUseMegaBytes = Number(userInput);
@@ -137,13 +134,23 @@ async function getMemoryFlagValue(userInput, features) {
}
}
else {
-const totalMemoryBytes = os.totalmem();
const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-const reservedMemoryMegaBytes = await getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, features);
+const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform, isScalingReservedRamEnabled);
memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
}
return Math.floor(memoryToUseMegaBytes);
}
+exports.getMemoryFlagValueForPlatform = getMemoryFlagValueForPlatform;
+/**
+* Get the value of the codeql `--ram` flag as configured by the `ram` input.
+* If no value was specified, the total available memory will be used minus a
+* threshold reserved for the OS.
+*
+* @returns {number} the amount of RAM to use, in megabytes
+*/
+function getMemoryFlagValue(userInput, isScalingReservedRamEnabled) {
+return getMemoryFlagValueForPlatform(userInput, os.totalmem(), process.platform, isScalingReservedRamEnabled);
+}
exports.getMemoryFlagValue = getMemoryFlagValue;
/**
* Get the codeql `--ram` flag as configured by the `ram` input. If no value was
@@ -152,8 +159,8 @@ exports.getMemoryFlagValue = getMemoryFlagValue;
*
* @returns string
*/
-async function getMemoryFlag(userInput, features) {
+function getMemoryFlag(userInput, isScalingReservedRamEnabled) {
-const megabytes = await getMemoryFlagValue(userInput, features);
+const megabytes = getMemoryFlagValue(userInput, isScalingReservedRamEnabled);
return `--ram=${megabytes}`;
}
exports.getMemoryFlag = getMemoryFlag;
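
Taken together, the hunks above turn the `--ram` computation into a pure function of total memory, platform, and a boolean flag. A minimal sketch of the resulting arithmetic, with a worked example; the helper name and simplified signature below are illustrative, not the module's actual API:

```typescript
// Sketch of the reserved-memory arithmetic shown in the hunks above.
// reservedMemoryMb is a local illustration, not an export of lib/util.js.
function reservedMemoryMb(
  totalMemoryMb: number,
  platform: string,
  isScalingReservedRamEnabled: boolean,
): number {
  // Windows needs more memory for OS processes.
  const fixedAmount = 1024 * (platform === "win32" ? 1.5 : 1);
  if (isScalingReservedRamEnabled) {
    // Reserve an additional 2.5% of the memory above 8 GB for kernel page tables.
    return fixedAmount + 0.025 * Math.max(totalMemoryMb - 8 * 1024, 0);
  }
  return fixedAmount;
}

// Worked example: a 64 GB Linux runner with scaling enabled reserves
// 1024 + 0.025 * (65536 - 8192) = 2457.6 MB, so the flag becomes
// `--ram=${Math.floor(65536 - 2457.6)}`, i.e. "--ram=63078".
console.log(`--ram=${Math.floor(64 * 1024 - reservedMemoryMb(64 * 1024, "linux", true))}`);
```

The 63078 figure matches the expectation added to lib/util.test.js further down.
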
@@ -261,7 +268,6 @@ function parseGitHubUrl(inputUrl) {
return url.toString();
}
exports.parseGitHubUrl = parseGitHubUrl;
-const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR = "CODEQL_ACTION_WARNED_ABOUT_VERSION";
let hasBeenWarnedAboutVersion = false;
var GitHubVariant;
@@ -271,30 +277,6 @@ var GitHubVariant;
GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
GitHubVariant[GitHubVariant["GHE_DOTCOM"] = 3] = "GHE_DOTCOM";
})(GitHubVariant || (exports.GitHubVariant = GitHubVariant = {}));
-async function getGitHubVersion(apiDetails) {
-// We can avoid making an API request in the standard dotcom case
-if (parseGitHubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
-return { type: GitHubVariant.DOTCOM };
-}
-// Doesn't strictly have to be the meta endpoint as we're only
-// using the response headers which are available on every request.
-const apiClient = (0, api_client_1.getApiClient)();
-const response = await apiClient.rest.meta.get();
-// This happens on dotcom, although we expect to have already returned in that
-// case. This can also serve as a fallback in cases we haven't foreseen.
-if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
-return { type: GitHubVariant.DOTCOM };
-}
-if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
-return { type: GitHubVariant.GHAE };
-}
-if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
-return { type: GitHubVariant.GHE_DOTCOM };
-}
-const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
-return { type: GitHubVariant.GHES, version };
-}
-exports.getGitHubVersion = getGitHubVersion;
function checkGitHubVersionInRange(version, logger) {
if (hasBeenWarnedAboutVersion || version.type !== GitHubVariant.GHES) {
return;
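
The block removed above drops `getGitHubVersion` from this module; its core logic is a small decision table over the `x-github-enterprise-version` response header. A compact sketch of that mapping, with simplified types for illustration (`Variant` here stands in for the `GitHubVariant` enum):

```typescript
// Decision table implemented by the removed getGitHubVersion, simplified.
type Variant =
  | { type: "DOTCOM" }
  | { type: "GHAE" }
  | { type: "GHE_DOTCOM" }
  | { type: "GHES"; version: string };

function variantFromVersionHeader(header: string | undefined): Variant {
  // Missing header: treat as github.com (also the fallback for unforeseen cases).
  if (header === undefined) return { type: "DOTCOM" };
  if (header === "GitHub AE") return { type: "GHAE" };
  if (header === "ghe.com") return { type: "GHE_DOTCOM" };
  // Anything else is interpreted as a GitHub Enterprise Server version string.
  return { type: "GHES", version: header };
}
```

Requests to github.com skip the header check entirely, since the URL alone already identifies the dotcom variant.
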
@@ -445,65 +427,6 @@ async function supportExpectDiscardedCache(codeQL) {
return codeQlVersionAbove(codeQL, "2.12.1");
}
exports.supportExpectDiscardedCache = supportExpectDiscardedCache;
-exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
-/**
-* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
-* queries beta.
-*/
-async function getMlPoweredJsQueriesPack(codeQL) {
-let version;
-if (await codeQlVersionAbove(codeQL, "2.11.3")) {
-version = "~0.4.0";
-}
-else {
-version = `~0.3.0`;
-}
-return (0, config_utils_1.prettyPrintPack)({
-name: exports.ML_POWERED_JS_QUERIES_PACK_NAME,
-version,
-});
-}
-exports.getMlPoweredJsQueriesPack = getMlPoweredJsQueriesPack;
-/**
-* Get information about ML-powered JS queries to populate status reports with.
-*
-* This will be:
-*
-* - The version string if the analysis is using a single version of the ML-powered query pack.
-* - "latest" if the version string of the ML-powered query pack is undefined. This is unlikely to
-* occur in practice (see comment below).
-* - "false" if the analysis won't run any ML-powered JS queries.
-* - "other" in all other cases.
-*
-* Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
-* errors with ML-powered queries turned on and off. We also want to be able to compare minor
-* version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
-* version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
-* version strings.
-*
-* This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
-* `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
-* explanation as to why this is.
-*/
-function getMlPoweredJsQueriesStatus(config) {
-const mlPoweredJsQueryPacks = (config.packs.javascript || [])
-.map((p) => (0, config_utils_1.parsePacksSpecification)(p))
-.filter((pack) => pack.name === "codeql/javascript-experimental-atm-queries" && !pack.path);
-switch (mlPoweredJsQueryPacks.length) {
-case 1:
-// We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
-// otherwise we won't be able to make changes to the pack unless those changes are compatible
-// with each version of the CodeQL Action. Therefore in practice we should only hit the
-// `latest` case here when customers have explicitly added the ML-powered query pack to their
-// CodeQL config.
-return mlPoweredJsQueryPacks[0].version || "latest";
-case 0:
-return "false";
-default:
-return "other";
-}
-}
-exports.getMlPoweredJsQueriesStatus = getMlPoweredJsQueriesStatus;
/*
* Returns whether we are in test mode.
*
@@ -513,23 +436,6 @@ function isInTestMode() {
return process.env[environment_1.EnvVar.TEST_MODE] === "true";
}
exports.isInTestMode = isInTestMode;
-/**
-* @returns true if the action should generate a conde-scanning config file
-* that gets passed to the CLI.
-*/
-async function useCodeScanningConfigInCli(codeql, features) {
-return await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
-}
-exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
-async function logCodeScanningConfigInCli(codeql, features, logger) {
-if (await useCodeScanningConfigInCli(codeql, features)) {
-logger.info("Code Scanning configuration file being processed in the codeql CLI.");
-}
-else {
-logger.info("Code Scanning configuration file being processed in the codeql-action.");
-}
-}
-exports.logCodeScanningConfigInCli = logCodeScanningConfigInCli;
/*
* Returns whether the path in the argument represents an existing directory.
*/
@@ -753,4 +659,27 @@ function wrapError(error) {
return error instanceof Error ? error : new Error(String(error));
}
exports.wrapError = wrapError;
+exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
+/**
+* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
+* queries beta.
+*/
+async function getMlPoweredJsQueriesPack(codeQL) {
+let version;
+if (await codeQlVersionAbove(codeQL, "2.11.3")) {
+version = "~0.4.0";
+}
+else {
+version = `~0.3.0`;
+}
+return prettyPrintPack({
+name: exports.ML_POWERED_JS_QUERIES_PACK_NAME,
+version,
+});
+}
+exports.getMlPoweredJsQueriesPack = getMlPoweredJsQueriesPack;
+function prettyPrintPack(pack) {
+return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
+}
+exports.prettyPrintPack = prettyPrintPack;
//# sourceMappingURL=util.js.map
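
The newly added `prettyPrintPack` renders a parsed pack back into its `name@version:path` string form. A small usage sketch; the second pack name below is hypothetical, chosen only to show the optional `path` component:

```typescript
// Standalone copy of the prettyPrintPack logic added above, for illustration.
interface Pack {
  name: string;
  version?: string;
  path?: string;
}

function prettyPrintPack(pack: Pack): string {
  return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
}

console.log(prettyPrintPack({ name: "codeql/javascript-experimental-atm-queries", version: "~0.4.0" }));
// => "codeql/javascript-experimental-atm-queries@~0.4.0"

// Hypothetical pack, used only to illustrate the optional path suffix.
console.log(prettyPrintPack({ name: "octo-org/custom-queries", path: "suites/default.qls" }));
// => "octo-org/custom-queries:suites/default.qls"
```
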

File diff suppressed because one or more lines are too long

lib/util.test.js (generated): 192 changed lines

@@ -29,11 +29,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path_1 = __importDefault(require("path"));
-const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
-const sinon = __importStar(require("sinon"));
-const api = __importStar(require("./api-client"));
-const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -43,28 +39,62 @@ const util = __importStar(require("./util"));
const toolNames = util.getToolNames(JSON.parse(input));
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
-(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", async (t) => {
-const totalMem = os.totalmem() / (1024 * 1024);
-const fixedAmount = process.platform === "win32" ? 1536 : 1024;
-const scaledAmount = 0.02 * totalMem;
-const expectedMemoryValue = Math.floor(totalMem - fixedAmount);
-const expectedMemoryValueWithScaling = Math.floor(totalMem - fixedAmount - scaledAmount);
-const tests = [
-[undefined, false, `--ram=${expectedMemoryValue}`],
-["", false, `--ram=${expectedMemoryValue}`],
-["512", false, "--ram=512"],
-[undefined, true, `--ram=${expectedMemoryValueWithScaling}`],
-["", true, `--ram=${expectedMemoryValueWithScaling}`],
-];
-for (const [input, withScaling, expectedFlag] of tests) {
-const features = (0, testing_utils_1.createFeatures)(withScaling ? [feature_flags_1.Feature.ScalingReservedRamEnabled] : []);
-const flag = await util.getMemoryFlag(input, features);
-t.deepEqual(flag, expectedFlag);
-}
-});
+const GET_MEMORY_FLAG_TESTS = [
+{
+input: undefined,
+totalMemoryMb: 8 * 1024,
+platform: "linux",
+expectedMemoryValue: 7 * 1024,
+expectedMemoryValueWithScaling: 7 * 1024,
+},
+{
+input: undefined,
+totalMemoryMb: 8 * 1024,
+platform: "win32",
+expectedMemoryValue: 6.5 * 1024,
+expectedMemoryValueWithScaling: 6.5 * 1024,
+},
+{
+input: "",
+totalMemoryMb: 8 * 1024,
+platform: "linux",
+expectedMemoryValue: 7 * 1024,
+expectedMemoryValueWithScaling: 7 * 1024,
+},
+{
+input: "512",
+totalMemoryMb: 8 * 1024,
+platform: "linux",
+expectedMemoryValue: 512,
+expectedMemoryValueWithScaling: 512,
+},
+{
+input: undefined,
+totalMemoryMb: 64 * 1024,
+platform: "linux",
+expectedMemoryValue: 63 * 1024,
+expectedMemoryValueWithScaling: 63078, // Math.floor(1024 * (64 - 1 - 0.025 * (64 - 8)))
+},
+{
+input: undefined,
+totalMemoryMb: 64 * 1024,
+platform: "win32",
+expectedMemoryValue: 62.5 * 1024,
+expectedMemoryValueWithScaling: 62566, // Math.floor(1024 * (64 - 1.5 - 0.025 * (64 - 8)))
+},
+];
+for (const { input, totalMemoryMb, platform, expectedMemoryValue, expectedMemoryValueWithScaling, } of GET_MEMORY_FLAG_TESTS) {
+(0, ava_1.default)(`Memory flag value is ${expectedMemoryValue} without scaling and ${expectedMemoryValueWithScaling} with scaling ` +
+`for ${input ?? "no user input"} on ${platform} with ${totalMemoryMb} MB total system RAM`, async (t) => {
+for (const withScaling of [true, false]) {
+const flag = util.getMemoryFlagValueForPlatform(input, totalMemoryMb * 1024 * 1024, platform, withScaling);
+t.deepEqual(flag, withScaling ? expectedMemoryValueWithScaling : expectedMemoryValue);
+}
+});
+}
(0, ava_1.default)("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => {
for (const input of ["-1", "hello!"]) {
-await t.throwsAsync(async () => await util.getMemoryFlag(input, (0, testing_utils_1.createFeatures)([])));
+t.throws(() => util.getMemoryFlag(input, false));
}
});
(0, ava_1.default)("getAddSnippetsFlag() should return the correct flag", (t) => {
@@ -146,120 +176,6 @@ const util = __importStar(require("./util"));
t.is(util.apiVersionInRange("1.32.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_NEW);
t.is(util.apiVersionInRange("2.1.0", "1.33", "2.0"), util.DisallowedAPIVersionReason.ACTION_TOO_OLD);
});
-function mockGetMetaVersionHeader(versionHeader) {
-// Passing an auth token is required, so we just use a dummy value
-const client = github.getOctokit("123");
-const response = {
-headers: {
-"x-github-enterprise-version": versionHeader,
-},
-};
-const spyGetContents = sinon
-.stub(client.rest.meta, "get")
-// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-.resolves(response);
-sinon.stub(api, "getApiClient").value(() => client);
-return spyGetContents;
-}
-(0, ava_1.default)("getGitHubVersion", async (t) => {
-const v = await util.getGitHubVersion({
-auth: "",
-url: "https://github.com",
-apiURL: undefined,
-});
-t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
-mockGetMetaVersionHeader("2.0");
-const v2 = await util.getGitHubVersion({
-auth: "",
-url: "https://ghe.example.com",
-apiURL: undefined,
-});
-t.deepEqual({ type: util.GitHubVariant.GHES, version: "2.0" }, v2);
-mockGetMetaVersionHeader("GitHub AE");
-const ghae = await util.getGitHubVersion({
-auth: "",
-url: "https://example.githubenterprise.com",
-apiURL: undefined,
-});
-t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
-mockGetMetaVersionHeader(undefined);
-const v3 = await util.getGitHubVersion({
-auth: "",
-url: "https://ghe.example.com",
-apiURL: undefined,
-});
-t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
-mockGetMetaVersionHeader("ghe.com");
-const gheDotcom = await util.getGitHubVersion({
-auth: "",
-url: "https://foo.ghe.com",
-apiURL: undefined,
-});
-t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
-});
-const ML_POWERED_JS_STATUS_TESTS = [
-// If no packs are loaded, status is false.
-[[], "false"],
-// If another pack is loaded but not the ML-powered query pack, status is false.
-[["some-other/pack"], "false"],
-// If the ML-powered query pack is loaded with a specific version, status is that version.
-[[`${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
-// If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
-// status is the version of the ML-powered query pack.
-[
-["some-other/pack", `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`],
-"~0.1.0",
-],
-// If the ML-powered query pack is loaded without a version, the status is "latest".
-[[util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
-// If the ML-powered query pack is loaded with two different versions, the status is "other".
-[
-[
-`${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.1`,
-`${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.2`,
-],
-"other",
-],
-// If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
-// the status is "latest".
-[["some-other/pack", util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
-];
-for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
-const packDescriptions = `[${packs
-.map((pack) => JSON.stringify(pack))
-.join(", ")}]`;
-(0, ava_1.default)(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
-return util.withTmpDir(async (tmpDir) => {
-const config = {
-languages: [],
-queries: {},
-paths: [],
-pathsIgnore: [],
-originalUserInput: {},
-tempDir: tmpDir,
-codeQLCmd: "",
-gitHubVersion: {
-type: util.GitHubVariant.DOTCOM,
-},
-dbLocation: "",
-packs: {
-javascript: packs,
-},
-debugMode: false,
-debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
-debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
-augmentationProperties: {
-injectedMlQueries: false,
-packsInputCombines: false,
-queriesInputCombines: false,
-},
-trapCaches: {},
-trapCacheDownloadTime: 0,
-};
-t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
-});
-});
-}
(0, ava_1.default)("doesDirectoryExist", async (t) => {
// Returns false if no file/dir of this name exists
t.false(util.doesDirectoryExist("non-existent-file.txt"));

File diff suppressed because one or more lines are too long

lib/workflow.js (generated): 53 changed lines

@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
-exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunAttempt = exports.getWorkflowRunID = exports.getWorkflowRelativePath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
+exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -185,7 +185,7 @@ exports.getWorkflow = getWorkflow;
* Get the absolute path of the currently executing workflow.
*/
async function getWorkflowAbsolutePath(logger) {
-const relativePath = await getWorkflowRelativePath();
+const relativePath = await api.getWorkflowRelativePath();
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
if (fs.existsSync(absolutePath)) {
logger.debug(`Derived the following absolute path for the currently executing workflow: ${absolutePath}.`);
@@ -195,55 +195,6 @@ async function getWorkflowAbsolutePath(logger) {
"This can happen if the currently running workflow checks out a branch that doesn't contain " +
"the corresponding workflow file.");
}
-/**
-* Get the path of the currently executing workflow relative to the repository root.
-*/
-async function getWorkflowRelativePath() {
-const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
-const owner = repo_nwo[0];
-const repo = repo_nwo[1];
-const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
-const apiClient = api.getApiClient();
-const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
-owner,
-repo,
-run_id,
-});
-const workflowUrl = runsResponse.data.workflow_url;
-const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
-return workflowResponse.data.path;
-}
-exports.getWorkflowRelativePath = getWorkflowRelativePath;
-/**
-* Get the workflow run ID.
-*/
-function getWorkflowRunID() {
-const workflowRunIdString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID");
-const workflowRunID = parseInt(workflowRunIdString, 10);
-if (Number.isNaN(workflowRunID)) {
-throw new Error(`GITHUB_RUN_ID must define a non NaN workflow run ID. Current value is ${workflowRunIdString}`);
-}
-if (workflowRunID < 0) {
-throw new Error(`GITHUB_RUN_ID must be a non-negative integer. Current value is ${workflowRunIdString}`);
-}
-return workflowRunID;
-}
-exports.getWorkflowRunID = getWorkflowRunID;
-/**
-* Get the workflow run attempt number.
-*/
-function getWorkflowRunAttempt() {
-const workflowRunAttemptString = (0, util_1.getRequiredEnvParam)("GITHUB_RUN_ATTEMPT");
-const workflowRunAttempt = parseInt(workflowRunAttemptString, 10);
-if (Number.isNaN(workflowRunAttempt)) {
-throw new Error(`GITHUB_RUN_ATTEMPT must define a non NaN workflow run attempt. Current value is ${workflowRunAttemptString}`);
-}
-if (workflowRunAttempt <= 0) {
-throw new Error(`GITHUB_RUN_ATTEMPT must be a positive integer. Current value is ${workflowRunAttemptString}`);
-}
-return workflowRunAttempt;
-}
-exports.getWorkflowRunAttempt = getWorkflowRunAttempt;
function getStepsCallingAction(job, actionName) {
if (job.uses) {
throw new Error(`Could not get steps calling ${actionName} since the job calls a reusable workflow.`);

File diff suppressed because one or more lines are too long

node_modules/.package-lock.json (generated, vendored): 2 changed lines

@@ -1,6 +1,6 @@
{
"name": "codeql",
-"version": "2.21.0",
+"version": "2.21.1",
"lockfileVersion": 3,
"requires": true,
"packages": {

package-lock.json (generated): 4 changed lines

@@ -1,12 +1,12 @@
{
"name": "codeql",
-"version": "2.21.0",
+"version": "2.21.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "codeql",
-"version": "2.21.0",
+"version": "2.21.1",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^1.1.1",

package.json

@@ -1,6 +1,6 @@
{
"name": "codeql",
-"version": "2.21.0",
+"version": "2.21.1",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -4,7 +4,8 @@ import * as path from "path";
|
|||||||
import test from "ava";
|
import test from "ava";
|
||||||
import * as sinon from "sinon";
|
import * as sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsutil from "./actions-util";
|
import * as actionsUtil from "./actions-util";
|
||||||
|
import { computeAutomationID, createStatusReportBase } from "./api-client";
|
||||||
import { EnvVar } from "./environment";
|
import { EnvVar } from "./environment";
|
||||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||||
import { initializeEnvironment, withTmpDir } from "./util";
|
import { initializeEnvironment, withTmpDir } from "./util";
|
||||||
@@ -13,7 +14,7 @@ setupTests(test);
|
|||||||
|
|
||||||
test("getRef() throws on the empty string", async (t) => {
|
test("getRef() throws on the empty string", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "";
|
process.env["GITHUB_REF"] = "";
|
||||||
await t.throwsAsync(actionsutil.getRef);
|
await t.throwsAsync(actionsUtil.getRef);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
|
||||||
@@ -24,10 +25,10 @@ test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t)
|
|||||||
process.env["GITHUB_REF"] = expectedRef;
|
process.env["GITHUB_REF"] = expectedRef;
|
||||||
process.env["GITHUB_SHA"] = currentSha;
|
process.env["GITHUB_SHA"] = currentSha;
|
||||||
|
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||||
callback.withArgs("HEAD").resolves(currentSha);
|
callback.withArgs("HEAD").resolves(currentSha);
|
||||||
|
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsUtil.getRef();
|
||||||
t.deepEqual(actualRef, expectedRef);
|
t.deepEqual(actualRef, expectedRef);
|
||||||
callback.restore();
|
callback.restore();
|
||||||
});
|
});
|
||||||
@@ -41,11 +42,11 @@ test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has
|
|||||||
process.env["GITHUB_SHA"] = "b".repeat(40);
|
process.env["GITHUB_SHA"] = "b".repeat(40);
|
||||||
const sha = "a".repeat(40);
|
const sha = "a".repeat(40);
|
||||||
|
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||||
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
|
||||||
callback.withArgs("HEAD").resolves(sha);
|
callback.withArgs("HEAD").resolves(sha);
|
||||||
|
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsUtil.getRef();
|
||||||
t.deepEqual(actualRef, expectedRef);
|
t.deepEqual(actualRef, expectedRef);
|
||||||
callback.restore();
|
callback.restore();
|
||||||
});
|
});
|
||||||
@@ -57,11 +58,11 @@ test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (
|
|||||||
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
process.env["GITHUB_REF"] = "refs/pull/1/merge";
|
||||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||||
|
|
||||||
const callback = sinon.stub(actionsutil, "getCommitOid");
|
const callback = sinon.stub(actionsUtil, "getCommitOid");
|
||||||
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
|
||||||
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
|
||||||
|
|
||||||
const actualRef = await actionsutil.getRef();
|
const actualRef = await actionsUtil.getRef();
|
||||||
t.deepEqual(actualRef, "refs/pull/1/head");
|
t.deepEqual(actualRef, "refs/pull/1/head");
|
||||||
callback.restore();
|
callback.restore();
|
||||||
});
|
});
|
||||||
@@ -70,7 +71,7 @@ test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (
 test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
 await withTmpDir(async (tmpDir: string) => {
 setupActionsVars(tmpDir, tmpDir);
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
 getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));

@@ -78,11 +79,11 @@ test("getRef() returns ref provided as an input and ignores current HEAD", async
 process.env["GITHUB_REF"] = "refs/pull/1/merge";
 process.env["GITHUB_SHA"] = "a".repeat(40);

-const callback = sinon.stub(actionsutil, "getCommitOid");
+const callback = sinon.stub(actionsUtil, "getCommitOid");
 callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
 callback.withArgs("HEAD").resolves("b".repeat(40));

-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, "refs/pull/2/merge");
 callback.restore();
 getAdditionalInputStub.restore();
@@ -98,7 +99,7 @@ test("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t
 process.env["GITHUB_REF"] = "";
 process.env["GITHUB_SHA"] = currentSha;

-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 });
 });
@@ -112,7 +113,7 @@ test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided",
 process.env["GITHUB_REF"] = expectedRef;
 process.env["GITHUB_SHA"] = currentSha;

-const actualRef = await actionsutil.getRef();
+const actualRef = await actionsUtil.getRef();
 t.deepEqual(actualRef, expectedRef);
 });
 });
@@ -120,12 +121,12 @@ test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided",
 test("getRef() throws an error if only `ref` is provided as an input", async (t) => {
 await withTmpDir(async (tmpDir: string) => {
 setupActionsVars(tmpDir, tmpDir);
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");

 await t.throwsAsync(
 async () => {
-await actionsutil.getRef();
+await actionsUtil.getRef();
 },
 {
 instanceOf: Error,
@@ -141,12 +142,12 @@ test("getRef() throws an error if only `sha` is provided as an input", async (t)
 await withTmpDir(async (tmpDir: string) => {
 setupActionsVars(tmpDir, tmpDir);
 process.env["GITHUB_WORKSPACE"] = "/tmp";
-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));

 await t.throwsAsync(
 async () => {
-await actionsutil.getRef();
+await actionsUtil.getRef();
 },
 {
 instanceOf: Error,
@@ -159,7 +160,7 @@ test("getRef() throws an error if only `sha` is provided as an input", async (t)
 });

 test("computeAutomationID()", async (t) => {
-let actualAutomationID = actionsutil.computeAutomationID(
+let actualAutomationID = computeAutomationID(
 ".github/workflows/codeql-analysis.yml:analyze",
 '{"language": "javascript", "os": "linux"}'
 );
@@ -169,7 +170,7 @@ test("computeAutomationID()", async (t) => {
 );

 // check the environment sorting
-actualAutomationID = actionsutil.computeAutomationID(
+actualAutomationID = computeAutomationID(
 ".github/workflows/codeql-analysis.yml:analyze",
 '{"os": "linux", "language": "javascript"}'
 );
@@ -179,7 +180,7 @@ test("computeAutomationID()", async (t) => {
 );

 // check that an empty environment produces the right results
-actualAutomationID = actionsutil.computeAutomationID(
+actualAutomationID = computeAutomationID(
 ".github/workflows/codeql-analysis.yml:analyze",
 "{}"
 );
@@ -189,7 +190,7 @@ test("computeAutomationID()", async (t) => {
 );

 // check non string environment values
-actualAutomationID = actionsutil.computeAutomationID(
+actualAutomationID = computeAutomationID(
 ".github/workflows/codeql-analysis.yml:analyze",
 '{"number": 1, "object": {"language": "javascript"}}'
 );
@@ -199,7 +200,7 @@ test("computeAutomationID()", async (t) => {
 );

 // check undefined environment
-actualAutomationID = actionsutil.computeAutomationID(
+actualAutomationID = computeAutomationID(
 ".github/workflows/codeql-analysis.yml:analyze",
 undefined
 );
@@ -217,7 +218,7 @@ test("initializeEnvironment", (t) => {
 test("isAnalyzingDefaultBranch()", async (t) => {
 process.env["GITHUB_EVENT_NAME"] = "push";
 process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);
 process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "false";

 await withTmpDir(async (tmpDir) => {
@@ -235,13 +236,13 @@ test("isAnalyzingDefaultBranch()", async (t) => {

 process.env["GITHUB_REF"] = "main";
 process.env["GITHUB_SHA"] = "1234";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);

 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);

 process.env["GITHUB_REF"] = "feature";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);

 fs.writeFileSync(
 envFile,
@@ -251,9 +252,9 @@ test("isAnalyzingDefaultBranch()", async (t) => {
 );
 process.env["GITHUB_EVENT_NAME"] = "schedule";
 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), true);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), true);

-const getAdditionalInputStub = sinon.stub(actionsutil, "getOptionalInput");
+const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 getAdditionalInputStub
 .withArgs("ref")
 .resolves("refs/heads/something-else");
@@ -262,7 +263,7 @@ test("isAnalyzingDefaultBranch()", async (t) => {
 .resolves("0000000000000000000000000000000000000000");
 process.env["GITHUB_EVENT_NAME"] = "schedule";
 process.env["GITHUB_REF"] = "refs/heads/main";
-t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
+t.deepEqual(await actionsUtil.isAnalyzingDefaultBranch(), false);
 getAdditionalInputStub.restore();
 });
 });
@@ -279,10 +280,10 @@ test("createStatusReportBase", async (t) => {
 process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
 process.env["RUNNER_OS"] = "macOS";

-const getRequiredInput = sinon.stub(actionsutil, "getRequiredInput");
+const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
 getRequiredInput.withArgs("matrix").resolves("input/matrix");

-const statusReport = await actionsutil.createStatusReportBase(
+const statusReport = await createStatusReportBase(
 "init",
 "failure",
 new Date("May 19, 2023 05:19:00"),
@@ -1,5 +1,4 @@
 import * as fs from "fs";
-import * as os from "os";
 import * as path from "path";

 import * as core from "@actions/core";
@@ -7,25 +6,13 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as safeWhich from "@chrisgavin/safe-which";
 import { JSONSchemaForNPMPackageJsonFiles } from "@schemastore/package";

-import * as api from "./api-client";
-import { Config } from "./config-utils";
-import { EnvVar } from "./environment";
+import type { Config } from "./config-utils";
 import {
 doesDirectoryExist,
-getCachedCodeQlVersion,
 getCodeQLDatabasePath,
 getRequiredEnvParam,
-GITHUB_DOTCOM_URL,
-isHTTPError,
-isInTestMode,
-parseMatrixInput,
 UserError,
 } from "./util";
-import {
-getWorkflowRunID,
-getWorkflowRunAttempt,
-getWorkflowRelativePath,
-} from "./workflow";

 // eslint-disable-next-line import/no-commonjs
 const pkg = require("../package.json") as JSONSchemaForNPMPackageJsonFiles;
@@ -163,59 +150,6 @@ export const determineMergeBaseCommitOid = async function (): Promise<
 }
 };

-/**
-* Get the analysis key parameter for the current job.
-*
-* This will combine the workflow path and current job name.
-* Computing this the first time requires making requests to
-* the GitHub API, but after that the result will be cached.
-*/
-export async function getAnalysisKey(): Promise<string> {
-const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
-
-let analysisKey = process.env[analysisKeyEnvVar];
-if (analysisKey !== undefined) {
-return analysisKey;
-}
-
-const workflowPath = await getWorkflowRelativePath();
-const jobName = getRequiredEnvParam("GITHUB_JOB");
-
-analysisKey = `${workflowPath}:${jobName}`;
-core.exportVariable(analysisKeyEnvVar, analysisKey);
-return analysisKey;
-}
-
-export async function getAutomationID(): Promise<string> {
-const analysis_key = await getAnalysisKey();
-const environment = getRequiredInput("matrix");
-
-return computeAutomationID(analysis_key, environment);
-}
-
-export function computeAutomationID(
-analysis_key: string,
-environment: string | undefined
-): string {
-let automationID = `${analysis_key}/`;
-
-const matrix = parseMatrixInput(environment);
-if (matrix !== undefined) {
-// the id has to be deterministic so we sort the fields
-for (const entry of Object.entries(matrix).sort()) {
-if (typeof entry[1] === "string") {
-automationID += `${entry[0]}:${entry[1]}/`;
-} else {
-// In code scanning we just handle the string values,
-// the rest get converted to the empty string
-automationID += `${entry[0]}:/`;
-}
-}
-}
-
-return automationID;
-}
-
 /**
 * Get the ref currently being analyzed.
 */
@@ -301,7 +235,7 @@ function getRefFromEnv(): string {
 return refEnv;
 }

-type ActionName =
+export type ActionName =
 | "init"
 | "autobuild"
 | "finish"
@@ -425,187 +359,6 @@ export function getActionVersion(): string {
 return pkg.version!;
 }

-/**
-* Compose a StatusReport.
-*
-* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
-* @param status The status. Must be 'success', 'failure', or 'starting'
-* @param startedAt The time this action started executing.
-* @param cause Cause of failure (only supply if status is 'failure')
-* @param exception Exception (only supply if status is 'failure')
-*/
-export async function createStatusReportBase(
-actionName: ActionName,
-status: ActionStatus,
-actionStartedAt: Date,
-cause?: string,
-exception?: string
-): Promise<StatusReportBase> {
-const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
-const ref = await getRef();
-const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
-const workflowRunID = getWorkflowRunID();
-const workflowRunAttempt = getWorkflowRunAttempt();
-const workflowName = process.env["GITHUB_WORKFLOW"] || "";
-const jobName = process.env["GITHUB_JOB"] || "";
-const analysis_key = await getAnalysisKey();
-let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
-if (workflowStartedAt === undefined) {
-workflowStartedAt = actionStartedAt.toISOString();
-core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
-}
-const runnerOs = getRequiredEnvParam("RUNNER_OS");
-const codeQlCliVersion = getCachedCodeQlVersion();
-const actionRef = process.env["GITHUB_ACTION_REF"];
-const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
-// re-export the testing environment variable so that it is available to subsequent steps,
-// even if it was only set for this step
-if (testingEnvironment !== "") {
-core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
-}
-
-const statusReport: StatusReportBase = {
-job_run_uuid: jobRunUUID,
-workflow_run_id: workflowRunID,
-workflow_run_attempt: workflowRunAttempt,
-workflow_name: workflowName,
-job_name: jobName,
-analysis_key,
-commit_oid: commitOid,
-ref,
-action_name: actionName,
-action_ref: actionRef,
-action_oid: "unknown", // TODO decide if it's possible to fill this in
-started_at: workflowStartedAt,
-action_started_at: actionStartedAt.toISOString(),
-status,
-testing_environment: testingEnvironment,
-runner_os: runnerOs,
-action_version: getActionVersion(),
-};
-
-// Add optional parameters
-if (cause) {
-statusReport.cause = cause;
-}
-if (exception) {
-statusReport.exception = exception;
-}
-if (
-status === "success" ||
-status === "failure" ||
-status === "aborted" ||
-status === "user-error"
-) {
-statusReport.completed_at = new Date().toISOString();
-}
-const matrix = getRequiredInput("matrix");
-if (matrix) {
-statusReport.matrix_vars = matrix;
-}
-if ("RUNNER_ARCH" in process.env) {
-// RUNNER_ARCH is available only in GHES 3.4 and later
-// Values other than X86, X64, ARM, or ARM64 are discarded server side
-statusReport.runner_arch = process.env["RUNNER_ARCH"];
-}
-if (runnerOs === "Windows" || runnerOs === "macOS") {
-statusReport.runner_os_release = os.release();
-}
-if (codeQlCliVersion !== undefined) {
-statusReport.codeql_version = codeQlCliVersion;
-}
-
-return statusReport;
-}
-
-const GENERIC_403_MSG =
-"The repo on which this action is running is not opted-in to CodeQL code scanning.";
-const GENERIC_404_MSG =
-"Not authorized to use the CodeQL code scanning feature on this repo.";
-const OUT_OF_DATE_MSG =
-"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
-const INCOMPATIBLE_MSG =
-"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
-
-/**
-* Send a status report to the code_scanning/analysis/status endpoint.
-*
-* Optionally checks the response from the API endpoint and sets the action
-* as failed if the status report failed. This is only expected to be used
-* when sending a 'starting' report.
-*
-* Returns whether sending the status report was successful of not.
-*/
-export async function sendStatusReport<S extends StatusReportBase>(
-statusReport: S
-): Promise<boolean> {
-const statusReportJSON = JSON.stringify(statusReport);
-core.debug(`Sending status report: ${statusReportJSON}`);
-// If in test mode we don't want to upload the results
-if (isInTestMode()) {
-core.debug("In test mode. Status reports are not uploaded.");
-return true;
-}
-
-const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
-const [owner, repo] = nwo.split("/");
-const client = api.getApiClient();
-
-try {
-await client.request(
-"PUT /repos/:owner/:repo/code-scanning/analysis/status",
-{
-owner,
-repo,
-data: statusReportJSON,
-}
-);
-
-return true;
-} catch (e) {
-console.log(e);
-if (isHTTPError(e)) {
-switch (e.status) {
-case 403:
-if (
-getWorkflowEventName() === "push" &&
-process.env["GITHUB_ACTOR"] === "dependabot[bot]"
-) {
-core.setFailed(
-'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
-"Uploading Code Scanning results requires write access. " +
-'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
-"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events."
-);
-} else {
-core.setFailed(e.message || GENERIC_403_MSG);
-}
-return false;
-case 404:
-core.setFailed(GENERIC_404_MSG);
-return false;
-case 422:
-// schema incompatibility when reporting status
-// this means that this action version is no longer compatible with the API
-// we still want to continue as it is likely the analysis endpoint will work
-if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
-core.debug(INCOMPATIBLE_MSG);
-} else {
-core.debug(OUT_OF_DATE_MSG);
-}
-return true;
-}
-}
-
-// something else has gone wrong and the request/response will be logged by octokit
-// it's possible this is a transient error and we should continue scanning
-core.error(
-"An unexpected error occurred when sending code scanning status report."
-);
-return true;
-}
-}
-
 /**
 * Returns the name of the event that triggered this workflow.
 *
@@ -734,3 +487,41 @@ export function getUploadValue(input: string | undefined): UploadKind {
 return "always";
 }
 }
+
+/**
+* Get the workflow run ID.
+*/
+export function getWorkflowRunID(): number {
+const workflowRunIdString = getRequiredEnvParam("GITHUB_RUN_ID");
+const workflowRunID = parseInt(workflowRunIdString, 10);
+if (Number.isNaN(workflowRunID)) {
+throw new Error(
+`GITHUB_RUN_ID must define a non NaN workflow run ID. Current value is ${workflowRunIdString}`
+);
+}
+if (workflowRunID < 0) {
+throw new Error(
+`GITHUB_RUN_ID must be a non-negative integer. Current value is ${workflowRunIdString}`
+);
+}
+return workflowRunID;
+}
+
+/**
+* Get the workflow run attempt number.
+*/
+export function getWorkflowRunAttempt(): number {
+const workflowRunAttemptString = getRequiredEnvParam("GITHUB_RUN_ATTEMPT");
+const workflowRunAttempt = parseInt(workflowRunAttemptString, 10);
+if (Number.isNaN(workflowRunAttempt)) {
+throw new Error(
+`GITHUB_RUN_ATTEMPT must define a non NaN workflow run attempt. Current value is ${workflowRunAttemptString}`
+);
+}
+if (workflowRunAttempt <= 0) {
+throw new Error(
+`GITHUB_RUN_ATTEMPT must be a positive integer. Current value is ${workflowRunAttemptString}`
+);
+}
+return workflowRunAttempt;
+}
@@ -3,6 +3,7 @@ import * as sinon from "sinon";

 import * as actionsUtil from "./actions-util";
 import * as analyze from "./analyze";
+import * as api from "./api-client";
 import * as configUtils from "./config-utils";
 import {
 setupTests,
@@ -26,9 +27,9 @@ test("analyze action with RAM & threads from environment variables", async (t) =
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
 process.env["GITHUB_API_URL"] = "https://api.github.com";
 sinon
-.stub(actionsUtil, "createStatusReportBase")
+.stub(api, "createStatusReportBase")
 .resolves({} as actionsUtil.StatusReportBase);
-sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
+sinon.stub(api, "sendStatusReport").resolves(true);
 sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);

 const gitHubVersion: util.GitHubVersion = {
@@ -46,7 +47,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
 const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 optionalInputStub.withArgs("cleanup-level").returns("none");
 optionalInputStub.withArgs("expect-error").returns("false");
-sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
+sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
 setupActionsVars(tmpDir, tmpDir);
 mockFeatureFlagApiEndpoint(200, {});

@@ -3,6 +3,7 @@ import * as sinon from "sinon";

 import * as actionsUtil from "./actions-util";
 import * as analyze from "./analyze";
+import * as api from "./api-client";
 import * as configUtils from "./config-utils";
 import {
 setupTests,
@@ -26,9 +27,9 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
 process.env["GITHUB_API_URL"] = "https://api.github.com";
 sinon
-.stub(actionsUtil, "createStatusReportBase")
+.stub(api, "createStatusReportBase")
 .resolves({} as actionsUtil.StatusReportBase);
-sinon.stub(actionsUtil, "sendStatusReport").resolves(true);
+sinon.stub(api, "sendStatusReport").resolves(true);
 const gitHubVersion: util.GitHubVersion = {
 type: util.GitHubVariant.DOTCOM,
 };
@@ -44,7 +45,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
 const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
 optionalInputStub.withArgs("cleanup-level").returns("none");
 optionalInputStub.withArgs("expect-error").returns("false");
-sinon.stub(util, "getGitHubVersion").resolves(gitHubVersion);
+sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
 sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
 setupActionsVars(tmpDir, tmpDir);
 mockFeatureFlagApiEndpoint(200, {});
@@ -15,12 +15,13 @@ import {
 runQueries,
 } from "./analyze";
 import { getApiDetails, getGitHubVersion } from "./api-client";
+import * as api from "./api-client";
 import { runAutobuild } from "./autobuild";
 import { getCodeQL } from "./codeql";
-import { Config, getConfig } from "./config-utils";
+import { Config, getConfig, getMlPoweredJsQueriesStatus } from "./config-utils";
 import { uploadDatabases } from "./database-upload";
 import { EnvVar } from "./environment";
-import { Features } from "./feature-flags";
+import { Feature, Features } from "./feature-flags";
 import { Language } from "./languages";
 import { getActionsLogger, Logger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
@@ -60,7 +61,7 @@ export async function sendStatusReport(
 error,
 stats?.analyze_failure_language
 );
-const statusReportBase = await actionsUtil.createStatusReportBase(
+const statusReportBase = await api.createStatusReportBase(
 "finish",
 status,
 startedAt,
@@ -71,8 +72,7 @@ export async function sendStatusReport(
 ...statusReportBase,
 ...(config
 ? {
-ml_powered_javascript_queries:
-util.getMlPoweredJsQueriesStatus(config),
+ml_powered_javascript_queries: getMlPoweredJsQueriesStatus(config),
 }
 : {}),
 ...(stats || {}),
@@ -86,9 +86,9 @@ export async function sendStatusReport(
 await getTotalCacheSize(config.trapCaches, logger)
 ),
 };
-await actionsUtil.sendStatusReport(trapCacheUploadStatusReport);
+await api.sendStatusReport(trapCacheUploadStatusReport);
 } else {
-await actionsUtil.sendStatusReport(statusReport);
+await api.sendStatusReport(statusReport);
 }
 }

@@ -181,12 +181,8 @@ async function run() {
 const logger = getActionsLogger();
 try {
 if (
-!(await actionsUtil.sendStatusReport(
-await actionsUtil.createStatusReportBase(
-"finish",
-"starting",
-startedAt
-)
+!(await api.sendStatusReport(
+await api.createStatusReportBase("finish", "starting", startedAt)
 ))
 ) {
 return;
@@ -224,9 +220,9 @@ async function run() {
 logger
 );

-const memory = await util.getMemoryFlag(
+const memory = util.getMemoryFlag(
 actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"],
-features
+await features.getValue(Feature.ScalingReservedRamEnabled)
 );

 await runAutobuildIfLegacyGoWorkflow(config, logger);
@@ -10,7 +10,12 @@ import { DatabaseCreationTimings, EventReport } from "./actions-util";
 import * as analysisPaths from "./analysis-paths";
 import { CodeQL, getCodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
-import { FeatureEnablement, Feature } from "./feature-flags";
+import {
+FeatureEnablement,
+Feature,
+logCodeScanningConfigInCli,
+useCodeScanningConfigInCli,
+} from "./feature-flags";
 import { isScannedLanguage, Language } from "./languages";
 import { Logger } from "./logging";
 import { endTracingForCluster } from "./tracer-config";
@@ -235,7 +240,7 @@ export async function runQueries(
 const codeql = await getCodeQL(config.codeQLCmd);
 const queryFlags = [memoryFlag, threadsFlag];

-await util.logCodeScanningConfigInCli(codeql, features, logger);
+await logCodeScanningConfigInCli(codeql, features, logger);

 for (const language of config.languages) {
 const queries = config.queries[language];
@@ -248,7 +253,7 @@ export async function runQueries(
 const sarifFile = path.join(sarifFolder, `${language}.sarif`);
 let startTimeInterpretResults: Date;
 let endTimeInterpretResults: Date;
-if (await util.useCodeScanningConfigInCli(codeql, features)) {
+if (await useCodeScanningConfigInCli(codeql, features)) {
 // If we are using the code scanning config in the CLI,
 // much of the work needed to generate the query suites
 // is done in the CLI. We just need to make a single
@@ -1,25 +1,24 @@
+import * as github from "@actions/github";
 import * as githubUtils from "@actions/github/lib/utils";
 import test from "ava";
 import * as sinon from "sinon";

 import * as actionsUtil from "./actions-util";
-import { getApiClient } from "./api-client";
+import * as api from "./api-client";
 import { setupTests } from "./testing-utils";
 import * as util from "./util";

 setupTests(test);

-let pluginStub: sinon.SinonStub;
-let githubStub: sinon.SinonStub;
-
 test.beforeEach(() => {
-pluginStub = sinon.stub(githubUtils.GitHub, "plugin");
-githubStub = sinon.stub();
-pluginStub.returns(githubStub);
 util.initializeEnvironment(actionsUtil.getActionVersion());
 });

 test("getApiClient", async (t) => {
+const pluginStub: sinon.SinonStub = sinon.stub(githubUtils.GitHub, "plugin");
+const githubStub: sinon.SinonStub = sinon.stub();
+pluginStub.returns(githubStub);
+
 sinon.stub(actionsUtil, "getRequiredInput").withArgs("token").returns("xyz");
 const requiredEnvParamStub = sinon.stub(util, "getRequiredEnvParam");
 requiredEnvParamStub
@@ -29,7 +28,7 @@ test("getApiClient", async (t) => {
 .withArgs("GITHUB_API_URL")
 .returns("http://api.github.localhost");

-getApiClient();
+api.getApiClient();

 t.assert(
 githubStub.calledOnceWithExactly({
@@ -40,3 +39,78 @@ test("getApiClient", async (t) => {
 })
 );
 });
+
+function mockGetMetaVersionHeader(
+versionHeader: string | undefined
+): sinon.SinonStub<any, any> {
+// Passing an auth token is required, so we just use a dummy value
+const client = github.getOctokit("123");
+const response = {
+headers: {
+"x-github-enterprise-version": versionHeader,
+},
+};
+const spyGetContents = sinon
+.stub(client.rest.meta, "get")
+// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+.resolves(response as any);
+sinon.stub(api, "getApiClient").value(() => client);
+return spyGetContents;
+}
+
+test("getGitHubVersion for Dotcom", async (t) => {
+const apiDetails = {
+auth: "",
+url: "https://github.com",
+apiURL: "",
+};
+sinon.stub(api, "getApiDetails").returns(apiDetails);
+const v = await api.getGitHubVersionFromApi(
+github.getOctokit("123"),
+apiDetails
+);
+t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
+});
+
+test("getGitHubVersion for GHES", async (t) => {
+mockGetMetaVersionHeader("2.0");
+const v2 = await api.getGitHubVersionFromApi(api.getApiClient(), {
+auth: "",
+url: "https://ghe.example.com",
+apiURL: undefined,
+});
+t.deepEqual(
+{ type: util.GitHubVariant.GHES, version: "2.0" } as util.GitHubVersion,
+v2
+);
+});
+
+test("getGitHubVersion for GHAE", async (t) => {
+mockGetMetaVersionHeader("GitHub AE");
+const ghae = await api.getGitHubVersionFromApi(api.getApiClient(), {
+auth: "",
+url: "https://example.githubenterprise.com",
+apiURL: undefined,
+});
+t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
+});
+
+test("getGitHubVersion for different domain", async (t) => {
+mockGetMetaVersionHeader(undefined);
+const v3 = await api.getGitHubVersionFromApi(api.getApiClient(), {
+auth: "",
+url: "https://ghe.example.com",
+apiURL: undefined,
+});
+t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
+});
+
+test("getGitHubVersion for GHE_DOTCOM", async (t) => {
+mockGetMetaVersionHeader("ghe.com");
+const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
+auth: "",
+url: "https://foo.ghe.com",
+apiURL: undefined,
+});
+t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
+});
@@ -1,10 +1,36 @@
+import * as os from "os";
+
+import * as core from "@actions/core";
 import * as githubUtils from "@actions/github/lib/utils";
 import * as retry from "@octokit/plugin-retry";
 import consoleLogLevel from "console-log-level";

-import { getActionVersion, getRequiredInput } from "./actions-util";
-import * as util from "./util";
-import { getRequiredEnvParam, GitHubVersion } from "./util";
+import {
+ActionName,
+ActionStatus,
+StatusReportBase,
+getActionVersion,
+getOptionalInput,
+getRef,
+getRequiredInput,
+getWorkflowEventName,
+getWorkflowRunAttempt,
+getWorkflowRunID,
+} from "./actions-util";
+import { EnvVar } from "./environment";
+import {
+getCachedCodeQlVersion,
+getRequiredEnvParam,
+GITHUB_DOTCOM_URL,
+GitHubVariant,
+GitHubVersion,
+isHTTPError,
+isInTestMode,
+parseGitHubUrl,
+parseMatrixInput,
+} from "./util";
+
+const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";

 export enum DisallowedAPIVersionReason {
 ACTION_TOO_OLD,
@@ -62,6 +88,37 @@ export function getApiClientWithExternalAuth(

 let cachedGitHubVersion: GitHubVersion | undefined = undefined;

+export async function getGitHubVersionFromApi(
+apiClient: any,
+apiDetails: GitHubApiDetails
+): Promise<GitHubVersion> {
+// We can avoid making an API request in the standard dotcom case
+if (parseGitHubUrl(apiDetails.url) === GITHUB_DOTCOM_URL) {
+return { type: GitHubVariant.DOTCOM };
+}
+
+// Doesn't strictly have to be the meta endpoint as we're only
+// using the response headers which are available on every request.
+const response = await apiClient.rest.meta.get();
+
+// This happens on dotcom, although we expect to have already returned in that
+// case. This can also serve as a fallback in cases we haven't foreseen.
+if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
+return { type: GitHubVariant.DOTCOM };
+}
+
+if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
+return { type: GitHubVariant.GHAE };
+}
+
+if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
+return { type: GitHubVariant.GHE_DOTCOM };
+}
+
+const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] as string;
+return { type: GitHubVariant.GHES, version };
+}
+
 /**
 * Report the GitHub server version. This is a wrapper around
 * util.getGitHubVersion() that automatically supplies GitHub API details using
@@ -71,7 +128,269 @@ let cachedGitHubVersion: GitHubVersion | undefined = undefined;
 */
 export async function getGitHubVersion(): Promise<GitHubVersion> {
 if (cachedGitHubVersion === undefined) {
-cachedGitHubVersion = await util.getGitHubVersion(getApiDetails());
+cachedGitHubVersion = await getGitHubVersionFromApi(
+getApiClient(),
+getApiDetails()
+);
 }
 return cachedGitHubVersion;
 }
+
+/**
+* Compose a StatusReport.
+*
+* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
+* @param status The status. Must be 'success', 'failure', or 'starting'
+* @param startedAt The time this action started executing.
+* @param cause Cause of failure (only supply if status is 'failure')
+* @param exception Exception (only supply if status is 'failure')
+*/
+export async function createStatusReportBase(
+actionName: ActionName,
+status: ActionStatus,
+actionStartedAt: Date,
+cause?: string,
+exception?: string
+): Promise<StatusReportBase> {
+const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
+const ref = await getRef();
+const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
+const workflowRunID = getWorkflowRunID();
+const workflowRunAttempt = getWorkflowRunAttempt();
+const workflowName = process.env["GITHUB_WORKFLOW"] || "";
+const jobName = process.env["GITHUB_JOB"] || "";
+const analysis_key = await getAnalysisKey();
+let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
+if (workflowStartedAt === undefined) {
+workflowStartedAt = actionStartedAt.toISOString();
+core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
+}
+const runnerOs = getRequiredEnvParam("RUNNER_OS");
+const codeQlCliVersion = getCachedCodeQlVersion();
+const actionRef = process.env["GITHUB_ACTION_REF"];
+const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
+// re-export the testing environment variable so that it is available to subsequent steps,
+// even if it was only set for this step
+if (testingEnvironment !== "") {
+core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
+}
+
+const statusReport: StatusReportBase = {
+job_run_uuid: jobRunUUID,
+workflow_run_id: workflowRunID,
+workflow_run_attempt: workflowRunAttempt,
+workflow_name: workflowName,
+job_name: jobName,
+analysis_key,
+commit_oid: commitOid,
+ref,
+action_name: actionName,
+action_ref: actionRef,
+action_oid: "unknown", // TODO decide if it's possible to fill this in
+started_at: workflowStartedAt,
+action_started_at: actionStartedAt.toISOString(),
+status,
+testing_environment: testingEnvironment,
+runner_os: runnerOs,
+action_version: getActionVersion(),
+};
+
+// Add optional parameters
+if (cause) {
+statusReport.cause = cause;
+}
+if (exception) {
+statusReport.exception = exception;
+}
+if (
+status === "success" ||
+status === "failure" ||
+status === "aborted" ||
+status === "user-error"
+) {
+statusReport.completed_at = new Date().toISOString();
+}
+const matrix = getRequiredInput("matrix");
+if (matrix) {
+statusReport.matrix_vars = matrix;
+}
+if ("RUNNER_ARCH" in process.env) {
+// RUNNER_ARCH is available only in GHES 3.4 and later
+// Values other than X86, X64, ARM, or ARM64 are discarded server side
+statusReport.runner_arch = process.env["RUNNER_ARCH"];
+}
+if (runnerOs === "Windows" || runnerOs === "macOS") {
+statusReport.runner_os_release = os.release();
+}
+if (codeQlCliVersion !== undefined) {
+statusReport.codeql_version = codeQlCliVersion;
+}
+
+return statusReport;
+}
+
+const GENERIC_403_MSG =
+"The repo on which this action is running is not opted-in to CodeQL code scanning.";
+const GENERIC_404_MSG =
+"Not authorized to use the CodeQL code scanning feature on this repo.";
+const OUT_OF_DATE_MSG =
+"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
+const INCOMPATIBLE_MSG =
+"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
+
+/**
+* Send a status report to the code_scanning/analysis/status endpoint.
+*
+* Optionally checks the response from the API endpoint and sets the action
+* as failed if the status report failed. This is only expected to be used
+* when sending a 'starting' report.
+*
+* Returns whether sending the status report was successful of not.
+*/
+export async function sendStatusReport<S extends StatusReportBase>(
+statusReport: S
+): Promise<boolean> {
+const statusReportJSON = JSON.stringify(statusReport);
+core.debug(`Sending status report: ${statusReportJSON}`);
+// If in test mode we don't want to upload the results
+if (isInTestMode()) {
+core.debug("In test mode. Status reports are not uploaded.");
+return true;
+}
+
+const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
+const [owner, repo] = nwo.split("/");
+const client = getApiClient();
+
+try {
+await client.request(
+"PUT /repos/:owner/:repo/code-scanning/analysis/status",
+{
+owner,
+repo,
+data: statusReportJSON,
+}
+);
+
+return true;
+} catch (e) {
+console.log(e);
+if (isHTTPError(e)) {
+switch (e.status) {
+case 403:
+if (
+getWorkflowEventName() === "push" &&
+process.env["GITHUB_ACTOR"] === "dependabot[bot]"
+) {
+core.setFailed(
+'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
+"Uploading Code Scanning results requires write access. " +
+'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
+"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events."
+);
+} else {
+core.setFailed(e.message || GENERIC_403_MSG);
+}
+return false;
+case 404:
+core.setFailed(GENERIC_404_MSG);
+return false;
+case 422:
+// schema incompatibility when reporting status
+// this means that this action version is no longer compatible with the API
+// we still want to continue as it is likely the analysis endpoint will work
+if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
+core.debug(INCOMPATIBLE_MSG);
+} else {
+core.debug(OUT_OF_DATE_MSG);
+}
+return true;
+}
+}
+
+// something else has gone wrong and the request/response will be logged by octokit
+// it's possible this is a transient error and we should continue scanning
+core.error(
+"An unexpected error occurred when sending code scanning status report."
+);
+return true;
+}
+}
+
+/**
+* Get the path of the currently executing workflow relative to the repository root.
+*/
+export async function getWorkflowRelativePath(): Promise<string> {
+const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
+const owner = repo_nwo[0];
+const repo = repo_nwo[1];
+const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
+
+const apiClient = getApiClient();
+const runsResponse = await apiClient.request(
+"GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
+{
+owner,
+repo,
+run_id,
+}
+);
+const workflowUrl = runsResponse.data.workflow_url;
+
+const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
+
+return workflowResponse.data.path;
+}
+
+/**
+* Get the analysis key parameter for the current job.
+*
+* This will combine the workflow path and current job name.
+* Computing this the first time requires making requests to
+* the GitHub API, but after that the result will be cached.
+*/
+export async function getAnalysisKey(): Promise<string> {
+const analysisKeyEnvVar = "CODEQL_ACTION_ANALYSIS_KEY";
+
+let analysisKey = process.env[analysisKeyEnvVar];
+if (analysisKey !== undefined) {
+return analysisKey;
+}
+
+const workflowPath = await getWorkflowRelativePath();
+const jobName = getRequiredEnvParam("GITHUB_JOB");
+
+analysisKey = `${workflowPath}:${jobName}`;
+core.exportVariable(analysisKeyEnvVar, analysisKey);
+return analysisKey;
+}
+
+export async function getAutomationID(): Promise<string> {
+const analysis_key = await getAnalysisKey();
+const environment = getRequiredInput("matrix");
+
+return computeAutomationID(analysis_key, environment);
+}
+
+export function computeAutomationID(
+analysis_key: string,
+environment: string | undefined
+): string {
+let automationID = `${analysis_key}/`;
+
+const matrix = parseMatrixInput(environment);
+if (matrix !== undefined) {
+// the id has to be deterministic so we sort the fields
+for (const entry of Object.entries(matrix).sort()) {
+if (typeof entry[1] === "string") {
+automationID += `${entry[0]}:${entry[1]}/`;
+} else {
+// In code scanning we just handle the string values,
+// the rest get converted to the empty string
+automationID += `${entry[0]}:/`;
+}
+}
+}
+
+return automationID;
+}
@@ -1,15 +1,17 @@
 import * as core from "@actions/core";

 import {
-createStatusReportBase,
 getActionsStatus,
 getActionVersion,
 getOptionalInput,
 getTemporaryDirectory,
-sendStatusReport,
 StatusReportBase,
 } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import {
+createStatusReportBase,
+getGitHubVersion,
+sendStatusReport,
+} from "./api-client";
 import { determineAutobuildLanguages, runAutobuild } from "./autobuild";
 import * as configUtils from "./config-utils";
 import { EnvVar } from "./environment";
@@ -16,9 +16,9 @@ import * as codeql from "./codeql";
 import { AugmentationProperties, Config } from "./config-utils";
 import * as defaults from "./defaults.json";
 import { Feature, featureConfig } from "./feature-flags";
-import { ToolsSource } from "./init";
 import { Language } from "./languages";
 import { getRunnerLogger } from "./logging";
+import { ToolsSource } from "./setup-codeql";
 import {
 setupTests,
 createFeatures,
@@ -5,25 +5,22 @@ import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as yaml from "js-yaml";
 
-import { getOptionalInput } from "./actions-util";
+import { getOptionalInput, isAnalyzingDefaultBranch } from "./actions-util";
 import * as api from "./api-client";
-import { Config, getGeneratedCodeScanningConfigPath } from "./config-utils";
+import type { Config } from "./config-utils";
 import { EnvVar } from "./environment";
 import { errorMatchers } from "./error-matcher";
 import {
+  CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
   CodeQLDefaultVersionInfo,
   Feature,
   FeatureEnablement,
+  useCodeScanningConfigInCli,
 } from "./feature-flags";
-import { ToolsSource } from "./init";
 import { isTracedLanguage, Language } from "./languages";
 import { Logger } from "./logging";
 import * as setupCodeql from "./setup-codeql";
 import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";
-import {
-  getTrapCachingExtractorConfigArgs,
-  getTrapCachingExtractorConfigArgsForLang,
-} from "./trap-caching";
 import * as util from "./util";
 import { wrapError } from "./util";
 
@@ -315,16 +312,6 @@ export const CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";
  */
 export const CODEQL_VERSION_RESOLVE_ENVIRONMENT = "2.13.4";
 
-/**
- * Versions 2.13.4+ of the CodeQL CLI have an associated CodeQL Bundle release that is semantically versioned.
- */
-export const CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
-
-/**
- * Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
- */
-export const CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
-
 /**
  * Set up CodeQL CLI access.
  *
@@ -349,7 +336,7 @@ export async function setupCodeQL(
 ): Promise<{
   codeql: CodeQL;
   toolsDownloadDurationMs?: number;
-  toolsSource: ToolsSource;
+  toolsSource: setupCodeql.ToolsSource;
   toolsVersion: string;
 }> {
   try {
@@ -1170,7 +1157,7 @@ async function generateCodeScanningConfig(
   features: FeatureEnablement,
   logger: Logger
 ): Promise<string | undefined> {
-  if (!(await util.useCodeScanningConfigInCli(codeql, features))) {
+  if (!(await useCodeScanningConfigInCli(codeql, features))) {
     return;
   }
   const codeScanningConfigFile = getGeneratedCodeScanningConfigPath(config);
@@ -1268,3 +1255,40 @@ async function getCodeScanningConfigExportArguments(
   }
   return [];
 }
+
+// This constant sets the size of each TRAP cache in megabytes.
+const TRAP_CACHE_SIZE_MB = 1024;
+
+export async function getTrapCachingExtractorConfigArgs(
+  config: Config
+): Promise<string[]> {
+  const result: string[][] = [];
+  for (const language of config.languages)
+    result.push(
+      await getTrapCachingExtractorConfigArgsForLang(config, language)
+    );
+  return result.flat();
+}
+
+export async function getTrapCachingExtractorConfigArgsForLang(
+  config: Config,
+  language: Language
+): Promise<string[]> {
+  const cacheDir = config.trapCaches[language];
+  if (cacheDir === undefined) return [];
+  const write = await isAnalyzingDefaultBranch();
+  return [
+    `-O=${language}.trap.cache.dir=${cacheDir}`,
+    `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
+    `-O=${language}.trap.cache.write=${write}`,
+  ];
+}
+
+/**
+ * Get the path to the code scanning configuration generated by the CLI.
+ *
+ * This will not exist if the configuration is being parsed in the Action.
+ */
+export function getGeneratedCodeScanningConfigPath(config: Config): string {
+  return path.resolve(config.tempDir, "user-config.yaml");
+}
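Aside (not part of the diff): the relocated TRAP caching helper emits one `-O=` extractor option per setting. A rough standalone sketch, with `Config` reduced to the two fields the helper actually reads and the default-branch check replaced by a plain boolean:

// Standalone sketch of the "-O=" option construction from the hunk above.
interface MiniConfig {
  languages: string[];
  trapCaches: Partial<Record<string, string>>;
}

const TRAP_CACHE_SIZE_MB = 1024;

function trapCachingArgsForLang(
  config: MiniConfig,
  language: string,
  writeCache: boolean // stands in for isAnalyzingDefaultBranch()
): string[] {
  const cacheDir = config.trapCaches[language];
  if (cacheDir === undefined) return [];
  return [
    `-O=${language}.trap.cache.dir=${cacheDir}`,
    `-O=${language}.trap.cache.bound=${TRAP_CACHE_SIZE_MB}`,
    `-O=${language}.trap.cache.write=${writeCache}`,
  ];
}

console.log(
  trapCachingArgsForLang(
    { languages: ["javascript"], trapCaches: { javascript: "/tmp/trap/javascript" } },
    "javascript",
    true
  )
);
// -> ["-O=javascript.trap.cache.dir=/tmp/trap/javascript",
//     "-O=javascript.trap.cache.bound=1024",
//     "-O=javascript.trap.cache.write=true"]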
@@ -23,7 +23,16 @@ import {
   createFeatures,
   mockLanguagesInRepo as mockLanguagesInRepo,
 } from "./testing-utils";
-import { GitHubVariant, GitHubVersion, UserError, withTmpDir } from "./util";
+import {
+  DEFAULT_DEBUG_ARTIFACT_NAME,
+  DEFAULT_DEBUG_DATABASE_NAME,
+  ML_POWERED_JS_QUERIES_PACK_NAME,
+  GitHubVariant,
+  GitHubVersion,
+  prettyPrintPack,
+  UserError,
+  withTmpDir,
+} from "./util";
 
 setupTests(test);
 
@@ -1906,7 +1915,7 @@ const packSpecPrettyPrintingMacro = test.macro({
   exec: (t: ExecutionContext, packStr: string, packObj: configUtils.Pack) => {
     const parsed = configUtils.parsePacksSpecification(packStr);
     t.deepEqual(parsed, packObj, "parsed pack spec is correct");
-    const stringified = configUtils.prettyPrintPack(packObj);
+    const stringified = prettyPrintPack(packObj);
     t.deepEqual(
       stringified,
       packStr.trim(),
@@ -2809,3 +2818,66 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
     t.deepEqual(mockRequest.called, args.expectedApiCall);
   });
 });
+
+const ML_POWERED_JS_STATUS_TESTS: Array<[string[], string]> = [
+  // If no packs are loaded, status is false.
+  [[], "false"],
+  // If another pack is loaded but not the ML-powered query pack, status is false.
+  [["some-other/pack"], "false"],
+  // If the ML-powered query pack is loaded with a specific version, status is that version.
+  [[`${ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
+  // If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
+  // status is the version of the ML-powered query pack.
+  [["some-other/pack", `${ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
+  // If the ML-powered query pack is loaded without a version, the status is "latest".
+  [[ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
+  // If the ML-powered query pack is loaded with two different versions, the status is "other".
+  [
+    [
+      `${ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.1`,
+      `${ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.2`,
+    ],
+    "other",
+  ],
+  // If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
+  // the status is "latest".
+  [["some-other/pack", ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
+];
+
+for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
+  const packDescriptions = `[${packs
+    .map((pack) => JSON.stringify(pack))
+    .join(", ")}]`;
+  test(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
+    return withTmpDir(async (tmpDir) => {
+      const config: configUtils.Config = {
+        languages: [],
+        queries: {},
+        paths: [],
+        pathsIgnore: [],
+        originalUserInput: {},
+        tempDir: tmpDir,
+        codeQLCmd: "",
+        gitHubVersion: {
+          type: GitHubVariant.DOTCOM,
+        } as GitHubVersion,
+        dbLocation: "",
+        packs: {
+          javascript: packs,
+        },
+        debugMode: false,
+        debugArtifactName: DEFAULT_DEBUG_ARTIFACT_NAME,
+        debugDatabaseName: DEFAULT_DEBUG_DATABASE_NAME,
+        augmentationProperties: {
+          injectedMlQueries: false,
+          packsInputCombines: false,
+          queriesInputCombines: false,
+        },
+        trapCaches: {},
+        trapCacheDownloadTime: 0,
+      };
+
+      t.is(configUtils.getMlPoweredJsQueriesStatus(config), expectedStatus);
+    });
+  });
+}
@@ -13,7 +13,12 @@ import {
   ResolveQueriesOutput,
 } from "./codeql";
 import * as externalQueries from "./external-queries";
-import { Feature, FeatureEnablement } from "./feature-flags";
+import {
+  Feature,
+  FeatureEnablement,
+  logCodeScanningConfigInCli,
+  useCodeScanningConfigInCli,
+} from "./feature-flags";
 import {
   Language,
   LanguageOrAlias,
@@ -27,9 +32,8 @@ import {
   codeQlVersionAbove,
   getMlPoweredJsQueriesPack,
   GitHubVersion,
-  logCodeScanningConfigInCli,
   ML_POWERED_JS_QUERIES_PACK_NAME,
-  useCodeScanningConfigInCli,
+  prettyPrintPack,
   UserError,
 } from "./util";
 
@@ -1595,12 +1599,6 @@ export function parsePacksSpecification(
   };
 }
 
-export function prettyPrintPack(pack: Pack) {
-  return `${pack.name}${pack.version ? `@${pack.version}` : ""}${
-    pack.path ? `:${pack.path}` : ""
-  }`;
-}
-
 export function validatePackSpecification(pack: string, configFile?: string) {
   return prettyPrintPack(parsePacksSpecification(pack, configFile));
 }
@@ -1666,6 +1664,48 @@ function combinePacks(packs1: Packs, packs2: Packs): Packs {
   return packs;
 }
 
+/**
+ * Get information about ML-powered JS queries to populate status reports with.
+ *
+ * This will be:
+ *
+ * - The version string if the analysis is using a single version of the ML-powered query pack.
+ * - "latest" if the version string of the ML-powered query pack is undefined. This is unlikely to
+ *   occur in practice (see comment below).
+ * - "false" if the analysis won't run any ML-powered JS queries.
+ * - "other" in all other cases.
+ *
+ * Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
+ * errors with ML-powered queries turned on and off. We also want to be able to compare minor
+ * version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
+ * version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
+ * version strings.
+ *
+ * This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
+ * `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
+ * explanation as to why this is.
+ */
+export function getMlPoweredJsQueriesStatus(config: Config): string {
+  const mlPoweredJsQueryPacks = (config.packs.javascript || [])
+    .map((p) => parsePacksSpecification(p))
+    .filter(
+      (pack) => pack.name === ML_POWERED_JS_QUERIES_PACK_NAME && !pack.path
+    );
+  switch (mlPoweredJsQueryPacks.length) {
+    case 1:
+      // We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
+      // otherwise we won't be able to make changes to the pack unless those changes are compatible
+      // with each version of the CodeQL Action. Therefore in practice we should only hit the
+      // `latest` case here when customers have explicitly added the ML-powered query pack to their
+      // CodeQL config.
+      return mlPoweredJsQueryPacks[0].version || "latest";
+    case 0:
+      return "false";
+    default:
+      return "other";
+  }
+}
+
 function dbLocationOrDefault(
   dbLocation: string | undefined,
   tempDir: string
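Aside (not part of the diff): the status values documented above can be illustrated with a reduced sketch in which pack specifications are naively split on `@` (the real `parsePacksSpecification` also handles paths and validation):

// Standalone sketch of the status computation added in the hunk above.
const ML_PACK = "codeql/javascript-experimental-atm-queries";

function sketchMlQueriesStatus(javascriptPacks: string[]): string {
  const matches = javascriptPacks
    .map((spec) => {
      const [name, version] = spec.split("@");
      return { name, version };
    })
    .filter((pack) => pack.name === ML_PACK);
  if (matches.length === 0) return "false";
  if (matches.length === 1) return matches[0].version || "latest";
  return "other";
}

console.log(sketchMlQueriesStatus([])); // "false"
console.log(sketchMlQueriesStatus([`${ML_PACK}@~0.1.0`])); // "~0.1.0"
console.log(sketchMlQueriesStatus([ML_PACK])); // "latest"
console.log(sketchMlQueriesStatus([`${ML_PACK}@~0.0.1`, `${ML_PACK}@~0.0.2`])); // "other"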
@@ -2075,12 +2115,3 @@ export async function wrapEnvironment(
     }
   }
 }
-
-/**
- * Get the path to the code scanning configuration generated by the CLI.
- *
- * This will not exist if the configuration is being parsed in the Action.
- */
-export function getGeneratedCodeScanningConfigPath(config: Config): string {
-  return path.resolve(config.tempDir, "user-config.yaml");
-}
@@ -16,7 +16,7 @@ export const namedMatchersForTesting: { [key: string]: ErrorMatcher } = {
     outputRegex: new RegExp("No JavaScript or TypeScript code found\\."),
     message:
       "No code found during the build. Please see:\n" +
-      "https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
+      "https://gh.io/troubleshooting-code-scanning/no-source-code-seen-during-build",
   },
   fatalError: {
     outputRegex: new RegExp("A fatal error occurred"),
@@ -4,11 +4,7 @@ import * as path from "path";
 import * as semver from "semver";
 
 import { getApiClient } from "./api-client";
-import {
-  CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED,
-  CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
-  CodeQL,
-} from "./codeql";
+import type { CodeQL } from "./codeql";
 import * as defaults from "./defaults.json";
 import { Logger } from "./logging";
 import { RepositoryNwo } from "./repository";
@@ -17,6 +13,16 @@ import * as util from "./util";
 const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
 const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
 
+/**
+ * Versions 2.13.4+ of the CodeQL CLI have an associated CodeQL Bundle release that is semantically versioned.
+ */
+export const CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
+
+/**
+ * Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
+ */
+export const CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
+
 export interface CodeQLDefaultVersionInfo {
   cliVersion: string;
   tagName: string;
@@ -444,3 +450,30 @@ class GitHubFeatureFlags {
     }
   }
 }
+
+/**
+ * @returns Whether the Action should generate a code scanning config file
+ * that gets passed to the CLI.
+ */
+export async function useCodeScanningConfigInCli(
+  codeql: CodeQL,
+  features: FeatureEnablement
+): Promise<boolean> {
+  return await features.getValue(Feature.CliConfigFileEnabled, codeql);
+}
+
+export async function logCodeScanningConfigInCli(
+  codeql: CodeQL,
+  features: FeatureEnablement,
+  logger: Logger
+) {
+  if (await useCodeScanningConfigInCli(codeql, features)) {
+    logger.info(
+      "Code Scanning configuration file being processed in the codeql CLI."
+    );
+  } else {
+    logger.info(
+      "Code Scanning configuration file being processed in the codeql-action."
+    );
+  }
+}
@@ -7,14 +7,16 @@
 import * as core from "@actions/core";
 
 import {
-  createStatusReportBase,
   getActionsStatus,
   getTemporaryDirectory,
   printDebugLogs,
-  sendStatusReport,
   StatusReportBase,
 } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import {
+  createStatusReportBase,
+  getGitHubVersion,
+  sendStatusReport,
+} from "./api-client";
 import * as debugArtifacts from "./debug-artifacts";
 import { Features } from "./feature-flags";
 import * as initActionPostHelper from "./init-action-post-helper";
@@ -4,30 +4,28 @@ import * as core from "@actions/core";
 import { v4 as uuidV4 } from "uuid";
 
 import {
-  createStatusReportBase,
   getActionsStatus,
   getActionVersion,
   getOptionalInput,
   getRequiredInput,
   getTemporaryDirectory,
-  sendStatusReport,
   StatusReportBase,
 } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import {
+  createStatusReportBase,
+  getGitHubVersion,
+  sendStatusReport,
+} from "./api-client";
 import { CodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
+import { getMlPoweredJsQueriesStatus } from "./config-utils";
 import { EnvVar } from "./environment";
 import { Feature, Features } from "./feature-flags";
-import {
-  initCodeQL,
-  initConfig,
-  installPythonDeps,
-  runInit,
-  ToolsSource,
-} from "./init";
+import { initCodeQL, initConfig, installPythonDeps, runInit } from "./init";
 import { Language } from "./languages";
 import { getActionsLogger, Logger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
+import { ToolsSource } from "./setup-codeql";
 import { getTotalCacheSize } from "./trap-caching";
 import {
   checkForTimeout,
@@ -35,7 +33,6 @@ import {
   DEFAULT_DEBUG_ARTIFACT_NAME,
   DEFAULT_DEBUG_DATABASE_NAME,
   getMemoryFlagValue,
-  getMlPoweredJsQueriesStatus,
   getRequiredEnvParam,
   getThreadsFlagValue,
   initializeEnvironment,
@@ -330,7 +327,10 @@ async function run() {
     core.exportVariable(
       "CODEQL_RAM",
       process.env["CODEQL_RAM"] ||
-        (await getMemoryFlagValue(getOptionalInput("ram"), features)).toString()
+        getMemoryFlagValue(
+          getOptionalInput("ram"),
+          await features.getValue(Feature.ScalingReservedRamEnabled)
+        ).toString()
     );
     core.exportVariable(
       "CODEQL_THREADS",
src/init.ts
@@ -8,19 +8,17 @@ import * as analysisPaths from "./analysis-paths";
 import { GitHubApiCombinedDetails, GitHubApiDetails } from "./api-client";
 import { CodeQL, setupCodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
-import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags";
+import {
+  CodeQLDefaultVersionInfo,
+  FeatureEnablement,
+  useCodeScanningConfigInCli,
+} from "./feature-flags";
 import { Logger } from "./logging";
 import { RepositoryNwo } from "./repository";
+import { ToolsSource } from "./setup-codeql";
 import { TracerConfig, getCombinedTracerConfig } from "./tracer-config";
 import * as util from "./util";
 
-export enum ToolsSource {
-  Unknown = "UNKNOWN",
-  Local = "LOCAL",
-  Toolcache = "TOOLCACHE",
-  Download = "DOWNLOAD",
-}
-
 export async function initCodeQL(
   toolsInput: string | undefined,
   apiDetails: GitHubApiDetails,
@@ -116,7 +114,7 @@ export async function runInit(
   // before the `pack download` command was invoked. It is not required for the init command.
   let registriesAuthTokens: string | undefined;
   let qlconfigFile: string | undefined;
-  if (await util.useCodeScanningConfigInCli(codeql, features)) {
+  if (await useCodeScanningConfigInCli(codeql, features)) {
     ({ registriesAuthTokens, qlconfigFile } =
       await configUtils.generateRegistries(
         registriesInput,
@@ -1,14 +1,16 @@
 import * as core from "@actions/core";
 
 import {
-  createStatusReportBase,
   getActionsStatus,
   getOptionalInput,
   getRequiredInput,
   getTemporaryDirectory,
-  sendStatusReport,
 } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import {
+  createStatusReportBase,
+  getGitHubVersion,
+  sendStatusReport,
+} from "./api-client";
 import { CommandInvocationError } from "./codeql";
 import * as configUtils from "./config-utils";
 import { Language, resolveAlias } from "./languages";
@@ -15,11 +15,17 @@ import * as api from "./api-client";
 // these dependents.
 import * as defaults from "./defaults.json";
 import { CodeQLDefaultVersionInfo } from "./feature-flags";
-import { ToolsSource } from "./init";
 import { Logger } from "./logging";
 import * as util from "./util";
 import { isGoodVersion, wrapError } from "./util";
 
+export enum ToolsSource {
+  Unknown = "UNKNOWN",
+  Local = "LOCAL",
+  Toolcache = "TOOLCACHE",
+  Download = "DOWNLOAD",
+}
+
 export const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
 
 function getCodeQLBundleName(): string {
@@ -6,7 +6,11 @@ import test from "ava";
 import * as sinon from "sinon";
 
 import * as actionsUtil from "./actions-util";
-import { setCodeQL } from "./codeql";
+import {
+  setCodeQL,
+  getTrapCachingExtractorConfigArgs,
+  getTrapCachingExtractorConfigArgsForLang,
+} from "./codeql";
 import * as configUtils from "./config-utils";
 import { Config } from "./config-utils";
 import { Language } from "./languages";
@@ -14,8 +18,6 @@ import { getRecordingLogger, setupTests } from "./testing-utils";
 import {
   downloadTrapCaches,
   getLanguagesSupportingCaching,
-  getTrapCachingExtractorConfigArgs,
-  getTrapCachingExtractorConfigArgsForLang,
   uploadTrapCaches,
 } from "./trap-caching";
 import * as util from "./util";
@@ -5,7 +5,7 @@ import * as cache from "@actions/cache";
 
 import * as actionsUtil from "./actions-util";
 import { CodeQL, CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES } from "./codeql";
-import { Config } from "./config-utils";
+import type { Config } from "./config-utils";
 import { Language } from "./languages";
 import { Logger } from "./logging";
 import { codeQlVersionAbove, tryGetFolderBytes, withTimeout } from "./util";
@@ -17,9 +17,6 @@ import { codeQlVersionAbove, tryGetFolderBytes, withTimeout } from "./util";
 // goes into the cache key.
 const CACHE_VERSION = 1;
 
-// This constant sets the size of each TRAP cache in megabytes.
-const CACHE_SIZE_MB = 1024;
-
 // This constant sets the minimum size in megabytes of a TRAP
 // cache for us to consider it worth uploading.
 const MINIMUM_CACHE_MB_TO_UPLOAD = 10;
@@ -30,31 +27,6 @@ const MINIMUM_CACHE_MB_TO_UPLOAD = 10;
 // times as there are languages with TRAP caching enabled.
 const MAX_CACHE_OPERATION_MS = 120_000; // Two minutes
 
-export async function getTrapCachingExtractorConfigArgs(
-  config: Config
-): Promise<string[]> {
-  const result: string[][] = [];
-  for (const language of config.languages)
-    result.push(
-      await getTrapCachingExtractorConfigArgsForLang(config, language)
-    );
-  return result.flat();
-}
-
-export async function getTrapCachingExtractorConfigArgsForLang(
-  config: Config,
-  language: Language
-): Promise<string[]> {
-  const cacheDir = config.trapCaches[language];
-  if (cacheDir === undefined) return [];
-  const write = await actionsUtil.isAnalyzingDefaultBranch();
-  return [
-    `-O=${language}.trap.cache.dir=${cacheDir}`,
-    `-O=${language}.trap.cache.bound=${CACHE_SIZE_MB}`,
-    `-O=${language}.trap.cache.write=${write}`,
-  ];
-}
-
 /**
  * Download TRAP caches from the Actions cache.
 * @param codeql The CodeQL instance to use.
@@ -16,7 +16,6 @@ import { Logger } from "./logging";
 import { parseRepositoryNwo, RepositoryNwo } from "./repository";
 import * as util from "./util";
 import { SarifFile, SarifResult, SarifRun, wrapError } from "./util";
-import * as workflow from "./workflow";
 
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
@@ -81,7 +80,7 @@ function getAutomationID(
     return automationID;
   }
 
-  return actionsUtil.computeAutomationID(analysis_key, environment);
+  return api.computeAutomationID(analysis_key, environment);
 }
 
 // Upload the given payload.
@@ -169,11 +168,11 @@ export async function uploadFromActions(
     parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
     await actionsUtil.getCommitOid(checkoutPath),
     await actionsUtil.getRef(),
-    await actionsUtil.getAnalysisKey(),
+    await api.getAnalysisKey(),
     category,
     util.getRequiredEnvParam("GITHUB_WORKFLOW"),
-    workflow.getWorkflowRunID(),
-    workflow.getWorkflowRunAttempt(),
+    actionsUtil.getWorkflowRunID(),
+    actionsUtil.getWorkflowRunAttempt(),
     checkoutPath,
     actionsUtil.getRequiredInput("matrix"),
     logger
@@ -2,6 +2,7 @@ import * as core from "@actions/core";
 
 import * as actionsUtil from "./actions-util";
 import { getActionVersion } from "./actions-util";
+import { createStatusReportBase, sendStatusReport } from "./api-client";
 import { getActionsLogger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
 import * as upload_lib from "./upload-lib";
@@ -20,7 +21,7 @@ async function sendSuccessStatusReport(
   startedAt: Date,
   uploadStats: upload_lib.UploadStatusReport
 ) {
-  const statusReportBase = await actionsUtil.createStatusReportBase(
+  const statusReportBase = await createStatusReportBase(
     "upload-sarif",
     "success",
     startedAt
@@ -29,19 +30,15 @@ async function sendSuccessStatusReport(
     ...statusReportBase,
     ...uploadStats,
   };
-  await actionsUtil.sendStatusReport(statusReport);
+  await sendStatusReport(statusReport);
 }
 
 async function run() {
   const startedAt = new Date();
   initializeEnvironment(getActionVersion());
   if (
-    !(await actionsUtil.sendStatusReport(
-      await actionsUtil.createStatusReportBase(
-        "upload-sarif",
-        "starting",
-        startedAt
-      )
+    !(await sendStatusReport(
+      await createStatusReportBase("upload-sarif", "starting", startedAt)
     ))
   ) {
     return;
@@ -72,8 +69,8 @@ async function run() {
     const message = error.message;
     core.setFailed(message);
     console.log(error);
-    await actionsUtil.sendStatusReport(
-      await actionsUtil.createStatusReportBase(
+    await sendStatusReport(
+      await createStatusReportBase(
         "upload-sarif",
         actionsUtil.getActionsStatus(error),
         startedAt,
src/util.test.ts
@@ -2,20 +2,10 @@ import * as fs from "fs";
 import * as os from "os";
 import path from "path";
 
-import * as github from "@actions/github";
 import test from "ava";
-import * as sinon from "sinon";
 
-import * as api from "./api-client";
-import { Config } from "./config-utils";
-import { Feature } from "./feature-flags";
 import { getRunnerLogger } from "./logging";
-import {
-  createFeatures,
-  getRecordingLogger,
-  LoggedMessage,
-  setupTests,
-} from "./testing-utils";
+import { getRecordingLogger, LoggedMessage, setupTests } from "./testing-utils";
 import * as util from "./util";
 
 setupTests(test);
@@ -29,37 +19,83 @@ test("getToolNames", (t) => {
   t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
 });
 
-test("getMemoryFlag() should return the correct --ram flag", async (t) => {
-  const totalMem = os.totalmem() / (1024 * 1024);
-  const fixedAmount = process.platform === "win32" ? 1536 : 1024;
-  const scaledAmount = 0.02 * totalMem;
-  const expectedMemoryValue = Math.floor(totalMem - fixedAmount);
-  const expectedMemoryValueWithScaling = Math.floor(
-    totalMem - fixedAmount - scaledAmount
+const GET_MEMORY_FLAG_TESTS = [
+  {
+    input: undefined,
+    totalMemoryMb: 8 * 1024,
+    platform: "linux",
+    expectedMemoryValue: 7 * 1024,
+    expectedMemoryValueWithScaling: 7 * 1024,
+  },
+  {
+    input: undefined,
+    totalMemoryMb: 8 * 1024,
+    platform: "win32",
+    expectedMemoryValue: 6.5 * 1024,
+    expectedMemoryValueWithScaling: 6.5 * 1024,
+  },
+  {
+    input: "",
+    totalMemoryMb: 8 * 1024,
+    platform: "linux",
+    expectedMemoryValue: 7 * 1024,
+    expectedMemoryValueWithScaling: 7 * 1024,
+  },
+  {
+    input: "512",
+    totalMemoryMb: 8 * 1024,
+    platform: "linux",
+    expectedMemoryValue: 512,
+    expectedMemoryValueWithScaling: 512,
+  },
+  {
+    input: undefined,
+    totalMemoryMb: 64 * 1024,
+    platform: "linux",
+    expectedMemoryValue: 63 * 1024,
+    expectedMemoryValueWithScaling: 63078, // Math.floor(1024 * (64 - 1 - 0.025 * (64 - 8)))
+  },
+  {
+    input: undefined,
+    totalMemoryMb: 64 * 1024,
+    platform: "win32",
+    expectedMemoryValue: 62.5 * 1024,
+    expectedMemoryValueWithScaling: 62566, // Math.floor(1024 * (64 - 1.5 - 0.025 * (64 - 8)))
+  },
+];
+
+for (const {
+  input,
+  totalMemoryMb,
+  platform,
+  expectedMemoryValue,
+  expectedMemoryValueWithScaling,
+} of GET_MEMORY_FLAG_TESTS) {
+  test(
+    `Memory flag value is ${expectedMemoryValue} without scaling and ${expectedMemoryValueWithScaling} with scaling ` +
+      `for ${
+        input ?? "no user input"
+      } on ${platform} with ${totalMemoryMb} MB total system RAM`,
+    async (t) => {
+      for (const withScaling of [true, false]) {
+        const flag = util.getMemoryFlagValueForPlatform(
+          input,
+          totalMemoryMb * 1024 * 1024,
+          platform,
+          withScaling
+        );
+        t.deepEqual(
+          flag,
+          withScaling ? expectedMemoryValueWithScaling : expectedMemoryValue
+        );
+      }
+    }
   );
+}
-  const tests: Array<[string | undefined, boolean, string]> = [
-    [undefined, false, `--ram=${expectedMemoryValue}`],
-    ["", false, `--ram=${expectedMemoryValue}`],
-    ["512", false, "--ram=512"],
-    [undefined, true, `--ram=${expectedMemoryValueWithScaling}`],
-    ["", true, `--ram=${expectedMemoryValueWithScaling}`],
-  ];
-
-  for (const [input, withScaling, expectedFlag] of tests) {
-    const features = createFeatures(
-      withScaling ? [Feature.ScalingReservedRamEnabled] : []
-    );
-    const flag = await util.getMemoryFlag(input, features);
-    t.deepEqual(flag, expectedFlag);
-  }
-});
-
 test("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => {
   for (const input of ["-1", "hello!"]) {
-    await t.throwsAsync(
-      async () => await util.getMemoryFlag(input, createFeatures([]))
-    );
+    t.throws(() => util.getMemoryFlag(input, false));
   }
 });
 
@@ -203,134 +239,6 @@ test("allowed API versions", async (t) => {
   );
 });
-
-function mockGetMetaVersionHeader(
-  versionHeader: string | undefined
-): sinon.SinonStub<any, any> {
-  // Passing an auth token is required, so we just use a dummy value
-  const client = github.getOctokit("123");
-  const response = {
-    headers: {
-      "x-github-enterprise-version": versionHeader,
-    },
-  };
-  const spyGetContents = sinon
-    .stub(client.rest.meta, "get")
-    // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-    .resolves(response as any);
-  sinon.stub(api, "getApiClient").value(() => client);
-  return spyGetContents;
-}
-
-test("getGitHubVersion", async (t) => {
-  const v = await util.getGitHubVersion({
-    auth: "",
-    url: "https://github.com",
-    apiURL: undefined,
-  });
-  t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
-
-  mockGetMetaVersionHeader("2.0");
-  const v2 = await util.getGitHubVersion({
-    auth: "",
-    url: "https://ghe.example.com",
-    apiURL: undefined,
-  });
-  t.deepEqual(
-    { type: util.GitHubVariant.GHES, version: "2.0" } as util.GitHubVersion,
-    v2
-  );
-
-  mockGetMetaVersionHeader("GitHub AE");
-  const ghae = await util.getGitHubVersion({
-    auth: "",
-    url: "https://example.githubenterprise.com",
-    apiURL: undefined,
-  });
-  t.deepEqual({ type: util.GitHubVariant.GHAE }, ghae);
-
-  mockGetMetaVersionHeader(undefined);
-  const v3 = await util.getGitHubVersion({
-    auth: "",
-    url: "https://ghe.example.com",
-    apiURL: undefined,
-  });
-  t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
-
-  mockGetMetaVersionHeader("ghe.com");
-  const gheDotcom = await util.getGitHubVersion({
-    auth: "",
-    url: "https://foo.ghe.com",
-    apiURL: undefined,
-  });
-  t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
-});
-
-const ML_POWERED_JS_STATUS_TESTS: Array<[string[], string]> = [
-  // If no packs are loaded, status is false.
-  [[], "false"],
-  // If another pack is loaded but not the ML-powered query pack, status is false.
-  [["some-other/pack"], "false"],
-  // If the ML-powered query pack is loaded with a specific version, status is that version.
-  [[`${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`], "~0.1.0"],
-  // If the ML-powered query pack is loaded with a specific version and another pack is loaded, the
-  // status is the version of the ML-powered query pack.
-  [
-    ["some-other/pack", `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.1.0`],
-    "~0.1.0",
-  ],
-  // If the ML-powered query pack is loaded without a version, the status is "latest".
-  [[util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
-  // If the ML-powered query pack is loaded with two different versions, the status is "other".
-  [
-    [
-      `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.1`,
-      `${util.ML_POWERED_JS_QUERIES_PACK_NAME}@~0.0.2`,
-    ],
-    "other",
-  ],
-  // If the ML-powered query pack is loaded with no specific version, and another pack is loaded,
-  // the status is "latest".
-  [["some-other/pack", util.ML_POWERED_JS_QUERIES_PACK_NAME], "latest"],
-];
-
-for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
-  const packDescriptions = `[${packs
-    .map((pack) => JSON.stringify(pack))
-    .join(", ")}]`;
-  test(`ML-powered JS queries status report is "${expectedStatus}" for packs = ${packDescriptions}`, (t) => {
-    return util.withTmpDir(async (tmpDir) => {
-      const config: Config = {
-        languages: [],
-        queries: {},
-        paths: [],
-        pathsIgnore: [],
-        originalUserInput: {},
-        tempDir: tmpDir,
-        codeQLCmd: "",
-        gitHubVersion: {
-          type: util.GitHubVariant.DOTCOM,
-        } as util.GitHubVersion,
-        dbLocation: "",
-        packs: {
-          javascript: packs,
-        },
-        debugMode: false,
-        debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
-        debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
-        augmentationProperties: {
-          injectedMlQueries: false,
-          packsInputCombines: false,
-          queriesInputCombines: false,
-        },
-        trapCaches: {},
-        trapCacheDownloadTime: 0,
-      };
-
-      t.is(util.getMlPoweredJsQueriesStatus(config), expectedStatus);
-    });
-  });
-}
 
 test("doesDirectoryExist", async (t) => {
   // Returns false if no file/dir of this name exists
   t.false(util.doesDirectoryExist("non-existent-file.txt"));
src/util.ts
@@ -8,16 +8,10 @@ import del from "del";
 import getFolderSize from "get-folder-size";
 import * as semver from "semver";
 
-import { getApiClient, GitHubApiDetails } from "./api-client";
 import * as apiCompatibility from "./api-compatibility.json";
-import { CodeQL } from "./codeql";
-import {
-  Config,
-  parsePacksSpecification,
-  prettyPrintPack,
-} from "./config-utils";
+import type { CodeQL } from "./codeql";
+import type { Config, Pack } from "./config-utils";
 import { EnvVar } from "./environment";
-import { Feature, FeatureEnablement } from "./feature-flags";
 import { Language } from "./languages";
 import { Logger } from "./logging";
 
@@ -157,17 +151,18 @@ export async function withTmpDir<T>(
  * from committing too much of the available memory to CodeQL.
  * @returns number
  */
-async function getSystemReservedMemoryMegaBytes(
+function getSystemReservedMemoryMegaBytes(
   totalMemoryMegaBytes: number,
-  features: FeatureEnablement
-): Promise<number> {
+  platform: string,
+  isScalingReservedRamEnabled: boolean
+): number {
   // Windows needs more memory for OS processes.
-  const fixedAmount = 1024 * (process.platform === "win32" ? 1.5 : 1);
+  const fixedAmount = 1024 * (platform === "win32" ? 1.5 : 1);
 
-  if (await features.getValue(Feature.ScalingReservedRamEnabled)) {
-    // Reserve an additional 2% of the total memory, since the amount used by
+  if (isScalingReservedRamEnabled) {
+    // Reserve an additional 2.5% of the amount of memory above 8 GB, since the amount used by
     // the kernel for page tables scales with the size of physical memory.
-    const scaledAmount = 0.02 * totalMemoryMegaBytes;
+    const scaledAmount = 0.025 * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
     return fixedAmount + scaledAmount;
   } else {
     return fixedAmount;
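Aside (not part of the diff): with scaling enabled, the new reservation is a fixed 1024 MB (1536 MB on Windows) plus 2.5% of any memory above 8 GB. A standalone sketch of the arithmetic:

// Standalone sketch of the reserved-memory calculation from the hunk above.
function reservedMemoryMb(
  totalMemoryMb: number,
  platform: string,
  scalingEnabled: boolean
): number {
  // Fixed reservation: 1 GB, or 1.5 GB on Windows.
  const fixedAmount = 1024 * (platform === "win32" ? 1.5 : 1);
  if (!scalingEnabled) {
    return fixedAmount;
  }
  // Plus 2.5% of whatever physical memory exceeds 8 GB.
  return fixedAmount + 0.025 * Math.max(totalMemoryMb - 8 * 1024, 0);
}

// A 64 GB Linux runner with scaling enabled reserves 1024 + 0.025 * (65536 - 8192)
// = 2457.6 MB, leaving Math.floor(65536 - 2457.6) = 63078 MB -- the value expected
// in the util.test.ts table above.
console.log(Math.floor(64 * 1024 - reservedMemoryMb(64 * 1024, "linux", true)));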
@@ -181,10 +176,12 @@ async function getSystemReservedMemoryMegaBytes(
  *
  * @returns {number} the amount of RAM to use, in megabytes
  */
-export async function getMemoryFlagValue(
+export function getMemoryFlagValueForPlatform(
   userInput: string | undefined,
-  features: FeatureEnablement
-): Promise<number> {
+  totalMemoryBytes: number,
+  platform: string,
+  isScalingReservedRamEnabled: boolean
+): number {
   let memoryToUseMegaBytes: number;
   if (userInput) {
     memoryToUseMegaBytes = Number(userInput);
@@ -192,17 +189,36 @@ export async function getMemoryFlagValue(
       throw new Error(`Invalid RAM setting "${userInput}", specified.`);
     }
   } else {
-    const totalMemoryBytes = os.totalmem();
     const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-    const reservedMemoryMegaBytes = await getSystemReservedMemoryMegaBytes(
+    const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes(
       totalMemoryMegaBytes,
-      features
+      platform,
+      isScalingReservedRamEnabled
    );
    memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
  }
  return Math.floor(memoryToUseMegaBytes);
}
 
+/**
+ * Get the value of the codeql `--ram` flag as configured by the `ram` input.
+ * If no value was specified, the total available memory will be used minus a
+ * threshold reserved for the OS.
+ *
+ * @returns {number} the amount of RAM to use, in megabytes
+ */
+export function getMemoryFlagValue(
+  userInput: string | undefined,
+  isScalingReservedRamEnabled: boolean
+): number {
+  return getMemoryFlagValueForPlatform(
+    userInput,
+    os.totalmem(),
+    process.platform,
+    isScalingReservedRamEnabled
+  );
+}
+
 /**
  * Get the codeql `--ram` flag as configured by the `ram` input. If no value was
  * specified, the total available memory will be used minus a threshold
@@ -210,11 +226,11 @@ export async function getMemoryFlagValue(
  *
  * @returns string
  */
-export async function getMemoryFlag(
+export function getMemoryFlag(
   userInput: string | undefined,
-  features: FeatureEnablement
-): Promise<string> {
-  const megabytes = await getMemoryFlagValue(userInput, features);
+  isScalingReservedRamEnabled: boolean
+): string {
+  const megabytes = getMemoryFlagValue(userInput, isScalingReservedRamEnabled);
   return `--ram=${megabytes}`;
 }
 
@@ -337,7 +353,6 @@ export function parseGitHubUrl(inputUrl: string): string {
   return url.toString();
 }
 
-const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
 const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR =
   "CODEQL_ACTION_WARNED_ABOUT_VERSION";
 
@@ -355,37 +370,6 @@ export type GitHubVersion =
   | { type: GitHubVariant.GHE_DOTCOM }
   | { type: GitHubVariant.GHES; version: string };
 
-export async function getGitHubVersion(
-  apiDetails: GitHubApiDetails
-): Promise<GitHubVersion> {
-  // We can avoid making an API request in the standard dotcom case
-  if (parseGitHubUrl(apiDetails.url) === GITHUB_DOTCOM_URL) {
-    return { type: GitHubVariant.DOTCOM };
-  }
-
-  // Doesn't strictly have to be the meta endpoint as we're only
-  // using the response headers which are available on every request.
-  const apiClient = getApiClient();
-  const response = await apiClient.rest.meta.get();
-
-  // This happens on dotcom, although we expect to have already returned in that
-  // case. This can also serve as a fallback in cases we haven't foreseen.
-  if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
-    return { type: GitHubVariant.DOTCOM };
-  }
-
-  if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
-    return { type: GitHubVariant.GHAE };
-  }
-
-  if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
-    return { type: GitHubVariant.GHE_DOTCOM };
-  }
-
-  const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] as string;
-  return { type: GitHubVariant.GHES, version };
-}
-
 export function checkGitHubVersionInRange(
   version: GitHubVersion,
   logger: Logger
@@ -573,71 +557,6 @@ export async function supportExpectDiscardedCache(
   return codeQlVersionAbove(codeQL, "2.12.1");
 }
 
-export const ML_POWERED_JS_QUERIES_PACK_NAME =
-  "codeql/javascript-experimental-atm-queries";
-
-/**
- * Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
- * queries beta.
- */
-export async function getMlPoweredJsQueriesPack(
-  codeQL: CodeQL
-): Promise<string> {
-  let version;
-  if (await codeQlVersionAbove(codeQL, "2.11.3")) {
-    version = "~0.4.0";
-  } else {
-    version = `~0.3.0`;
-  }
-  return prettyPrintPack({
-    name: ML_POWERED_JS_QUERIES_PACK_NAME,
-    version,
-  });
-}
-
-/**
- * Get information about ML-powered JS queries to populate status reports with.
- *
- * This will be:
- *
- * - The version string if the analysis is using a single version of the ML-powered query pack.
- * - "latest" if the version string of the ML-powered query pack is undefined. This is unlikely to
- *   occur in practice (see comment below).
- * - "false" if the analysis won't run any ML-powered JS queries.
- * - "other" in all other cases.
- *
- * Our goal of the status report here is to allow us to compare the occurrence of timeouts and other
- * errors with ML-powered queries turned on and off. We also want to be able to compare minor
- * version bumps caused by us bumping the version range of `ML_POWERED_JS_QUERIES_PACK` in a new
- * version of the CodeQL Action. For instance, we might want to compare the `~0.1.0` and `~0.0.2`
- * version strings.
- *
- * This function lives here rather than in `init-action.ts` so it's easier to test, since tests for
- * `init-action.ts` would each need to live in their own file. See `analyze-action-env.ts` for an
- * explanation as to why this is.
- */
-export function getMlPoweredJsQueriesStatus(config: Config): string {
-  const mlPoweredJsQueryPacks = (config.packs.javascript || [])
-    .map((p) => parsePacksSpecification(p))
-    .filter(
-      (pack) =>
-        pack.name === "codeql/javascript-experimental-atm-queries" && !pack.path
-    );
-  switch (mlPoweredJsQueryPacks.length) {
-    case 1:
-      // We should always specify an explicit version string in `getMlPoweredJsQueriesPack`,
-      // otherwise we won't be able to make changes to the pack unless those changes are compatible
-      // with each version of the CodeQL Action. Therefore in practice we should only hit the
-      // `latest` case here when customers have explicitly added the ML-powered query pack to their
-      // CodeQL config.
-      return mlPoweredJsQueryPacks[0].version || "latest";
-    case 0:
-      return "false";
-    default:
-      return "other";
-  }
-}
-
 /*
  * Returns whether we are in test mode.
  *
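The deleted getMlPoweredJsQueriesStatus classifies the configured JavaScript packs into one of the status values listed in its doc comment. A rough, self-contained sketch of that classification follows; the inline parser is a simplified stand-in for the Action's parsePacksSpecification and only understands the name@version:path shape, which is an assumption of this example.

// Sketch only: not the Action's implementation.
interface PackSketch {
  name: string;
  version?: string;
  path?: string;
}

function parsePackSketch(spec: string): PackSketch {
  const [nameAndVersion, path] = spec.split(":", 2);
  const [name, version] = nameAndVersion.split("@", 2);
  return { name, version, path };
}

function mlPoweredJsQueriesStatusSketch(javascriptPacks: string[]): string {
  const mlPacks = javascriptPacks
    .map(parsePackSketch)
    .filter(
      (p) => p.name === "codeql/javascript-experimental-atm-queries" && !p.path
    );
  switch (mlPacks.length) {
    case 1:
      // An explicit version is expected in practice; "latest" covers the undefined case.
      return mlPacks[0].version || "latest";
    case 0:
      return "false";
    default:
      return "other";
  }
}

// mlPoweredJsQueriesStatusSketch(["codeql/javascript-experimental-atm-queries@~0.4.0"]) => "~0.4.0"
// mlPoweredJsQueriesStatusSketch([]) => "false"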
@@ -647,33 +566,6 @@ export function isInTestMode(): boolean {
   return process.env[EnvVar.TEST_MODE] === "true";
 }
 
-/**
- * @returns true if the action should generate a conde-scanning config file
- * that gets passed to the CLI.
- */
-export async function useCodeScanningConfigInCli(
-  codeql: CodeQL,
-  features: FeatureEnablement
-): Promise<boolean> {
-  return await features.getValue(Feature.CliConfigFileEnabled, codeql);
-}
-
-export async function logCodeScanningConfigInCli(
-  codeql: CodeQL,
-  features: FeatureEnablement,
-  logger: Logger
-) {
-  if (await useCodeScanningConfigInCli(codeql, features)) {
-    logger.info(
-      "Code Scanning configuration file being processed in the codeql CLI."
-    );
-  } else {
-    logger.info(
-      "Code Scanning configuration file being processed in the codeql-action."
-    );
-  }
-}
-
 /*
  * Returns whether the path in the argument represents an existing directory.
  */
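The two helpers removed above amount to a feature-gated log message. A minimal sketch of the same pattern, with the Action's Feature and FeatureEnablement types replaced by a stub interface and the feature name string chosen for illustration (both are assumptions of this example):

// Sketch only: the feature store and logger below are stand-ins, not the Action's types.
interface FeatureStoreSketch {
  getValue(feature: string): Promise<boolean>;
}

async function logConfigLocationSketch(
  features: FeatureStoreSketch,
  info: (message: string) => void
): Promise<void> {
  const cliHandlesConfig = await features.getValue("cli_config_file_enabled");
  info(
    cliHandlesConfig
      ? "Code Scanning configuration file being processed in the codeql CLI."
      : "Code Scanning configuration file being processed in the codeql-action."
  );
}

// Usage with a stubbed feature store:
// await logConfigLocationSketch({ getValue: async () => true }, console.log);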
@@ -924,3 +816,31 @@ export function fixInvalidNotificationsInFile(
 export function wrapError(error: unknown): Error {
   return error instanceof Error ? error : new Error(String(error));
 }
+
+export const ML_POWERED_JS_QUERIES_PACK_NAME =
+  "codeql/javascript-experimental-atm-queries";
+
+/**
+ * Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
+ * queries beta.
+ */
+export async function getMlPoweredJsQueriesPack(
+  codeQL: CodeQL
+): Promise<string> {
+  let version;
+  if (await codeQlVersionAbove(codeQL, "2.11.3")) {
+    version = "~0.4.0";
+  } else {
+    version = `~0.3.0`;
+  }
+  return prettyPrintPack({
+    name: ML_POWERED_JS_QUERIES_PACK_NAME,
+    version,
+  });
+}
+
+export function prettyPrintPack(pack: Pack) {
+  return `${pack.name}${pack.version ? `@${pack.version}` : ""}${
+    pack.path ? `:${pack.path}` : ""
+  }`;
+}
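The added prettyPrintPack defines the pack-spec string format used above. A couple of illustrative outputs, using a standalone copy of the helper (the second pack name is hypothetical):

// Standalone copy for illustration of the output format only.
interface Pack {
  name: string;
  version?: string;
  path?: string;
}

function prettyPrintPackSketch(pack: Pack): string {
  return `${pack.name}${pack.version ? `@${pack.version}` : ""}${
    pack.path ? `:${pack.path}` : ""
  }`;
}

// prettyPrintPackSketch({ name: "codeql/javascript-experimental-atm-queries", version: "~0.4.0" })
//   => "codeql/javascript-experimental-atm-queries@~0.4.0"
// prettyPrintPackSketch({ name: "octo-org/custom-queries", version: "1.2.3", path: "suites/all.qls" })
//   => "octo-org/custom-queries@1.2.3:suites/all.qls"  (hypothetical pack)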
@@ -225,7 +225,7 @@ export async function getWorkflow(logger: Logger): Promise<Workflow> {
  * Get the absolute path of the currently executing workflow.
  */
 async function getWorkflowAbsolutePath(logger: Logger): Promise<string> {
-  const relativePath = await getWorkflowRelativePath();
+  const relativePath = await api.getWorkflowRelativePath();
   const absolutePath = path.join(
     getRequiredEnvParam("GITHUB_WORKSPACE"),
     relativePath
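The only change in this hunk routes the relative-path lookup through the api module; the surrounding context shows how the absolute path is then assembled. A small sketch of that assembly, with the env-var handling simplified for the example:

import * as path from "path";

// Sketch of the path assembly shown in the context lines above; error handling is simplified.
function workflowAbsolutePathSketch(relativePath: string): string {
  const workspace = process.env.GITHUB_WORKSPACE;
  if (!workspace) {
    throw new Error("GITHUB_WORKSPACE is not set");
  }
  return path.join(workspace, relativePath);
}

// On a hosted runner, workflowAbsolutePathSketch(".github/workflows/codeql.yml") yields a path
// under the checked-out workspace directory.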
@@ -245,69 +245,6 @@ async function getWorkflowAbsolutePath(logger: Logger): Promise<string> {
   );
 }
 
-/**
- * Get the path of the currently executing workflow relative to the repository root.
- */
-export async function getWorkflowRelativePath(): Promise<string> {
-  const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
-  const owner = repo_nwo[0];
-  const repo = repo_nwo[1];
-  const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
-
-  const apiClient = api.getApiClient();
-  const runsResponse = await apiClient.request(
-    "GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
-    {
-      owner,
-      repo,
-      run_id,
-    }
-  );
-  const workflowUrl = runsResponse.data.workflow_url;
-
-  const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
-
-  return workflowResponse.data.path;
-}
-
-/**
- * Get the workflow run ID.
- */
-export function getWorkflowRunID(): number {
-  const workflowRunIdString = getRequiredEnvParam("GITHUB_RUN_ID");
-  const workflowRunID = parseInt(workflowRunIdString, 10);
-  if (Number.isNaN(workflowRunID)) {
-    throw new Error(
-      `GITHUB_RUN_ID must define a non NaN workflow run ID. Current value is ${workflowRunIdString}`
-    );
-  }
-  if (workflowRunID < 0) {
-    throw new Error(
-      `GITHUB_RUN_ID must be a non-negative integer. Current value is ${workflowRunIdString}`
-    );
-  }
-  return workflowRunID;
-}
-
-/**
- * Get the workflow run attempt number.
- */
-export function getWorkflowRunAttempt(): number {
-  const workflowRunAttemptString = getRequiredEnvParam("GITHUB_RUN_ATTEMPT");
-  const workflowRunAttempt = parseInt(workflowRunAttemptString, 10);
-  if (Number.isNaN(workflowRunAttempt)) {
-    throw new Error(
-      `GITHUB_RUN_ATTEMPT must define a non NaN workflow run attempt. Current value is ${workflowRunAttemptString}`
-    );
-  }
-  if (workflowRunAttempt <= 0) {
-    throw new Error(
-      `GITHUB_RUN_ATTEMPT must be a positive integer. Current value is ${workflowRunAttemptString}`
-    );
-  }
-  return workflowRunAttempt;
-}
-
 function getStepsCallingAction(
   job: WorkflowJob,
   actionName: string
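The deleted getWorkflowRelativePath resolves the workflow file in two REST calls: fetch the run to read its workflow_url, then fetch that URL to read the workflow's path. A self-contained sketch of the same lookup using plain fetch instead of the Action's Octokit client; the token handling and minimal response typing are assumptions of this example.

// Sketch only: same two-step lookup as the removed function, not the Action's implementation.
async function workflowRelativePathSketch(
  owner: string,
  repo: string,
  runId: number,
  token: string
): Promise<string> {
  const headers = {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${token}`,
  };

  // The run object includes a workflow_url pointing at the workflow definition.
  const runResponse = await fetch(
    `https://api.github.com/repos/${owner}/${repo}/actions/runs/${runId}?exclude_pull_requests=true`,
    { headers }
  );
  const run = (await runResponse.json()) as { workflow_url: string };

  // The workflow object carries the repo-relative path, e.g. ".github/workflows/codeql.yml".
  const workflowResponse = await fetch(run.workflow_url, { headers });
  const workflow = (await workflowResponse.json()) as { path: string };
  return workflow.path;
}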