Mirror of https://github.com/github/codeql-action.git, synced 2025-12-07 16:28:15 +08:00

Compare commits: codeql-bun...v2.21.4 (39 commits)
| SHA1 |
|---|
| a09933a12a |
| 37116fb629 |
| c613917766 |
| 492a68c323 |
| ac49314877 |
| ac35d7a02d |
| d03c744ad6 |
| a0407a8c60 |
| 8a7b2e9c9b |
| 9a510d9b07 |
| 2160dd3b55 |
| 1e14fd9e7a |
| 2ec74e3c0e |
| 25a42462f9 |
| f5920c85ab |
| 6ed1ccdc9a |
| ceb4b69c73 |
| 76584bd0a0 |
| fba32955a0 |
| f4eba74c80 |
| 9e4932e291 |
| bd20e2b607 |
| c93877b070 |
| 4e9f8a2be4 |
| a07ea2da0c |
| 9a202d29ad |
| 5b6282e01c |
| f0f7a35b85 |
| dda4ed3db4 |
| 834f08becb |
| 97ce1b094a |
| ba5812e64f |
| 3b25789080 |
| 7dcb3e5276 |
| 09ce3dbf90 |
| c6d284324b |
| c55207f0a2 |
| 45e889b4b3 |
| 131cb1abb1 |
8  .vscode/settings.json  vendored
@@ -6,5 +6,11 @@
// transpiled JavaScript
"lib": true,
}
},
// Installing a new Node package often triggers VS Code's git limit warnings as there is typically
// an intermediate stage where many files are modified. This setting suppresses these warnings.
"git.ignoreLimitWarning": true,
// Use the vendored TypeScript version to have a consistent development experience across
// machines.
"typescript.tsdk": "node_modules/typescript/lib"
}
11  CHANGELOG.md
@@ -2,9 +2,18 @@

See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## [UNRELEASED]
## 2.21.4 - 14 Aug 2023

- Update default CodeQL bundle version to 2.14.2. [#1831](https://github.com/github/codeql-action/pull/1831)
- Log a warning if the amount of available disk space runs low during a code scanning run. [#1825](https://github.com/github/codeql-action/pull/1825)
- When downloading CodeQL bundle version 2.13.4 and later, cache these bundles in the Actions tool cache using a simpler version number. [#1832](https://github.com/github/codeql-action/pull/1832)
- Fix an issue that first appeared in CodeQL Action v2.21.2 that prevented CodeQL invocations from being logged. [#1833](https://github.com/github/codeql-action/pull/1833)
- We are rolling out a feature in August 2023 that will improve the quality of file coverage information. [#1835](https://github.com/github/codeql-action/pull/1835)

## 2.21.3 - 08 Aug 2023

- We are rolling out a feature in August 2023 that will improve multi-threaded performance on larger runners. [#1817](https://github.com/github/codeql-action/pull/1817)
- We are rolling out a feature in August 2023 that adds beta support for [Project Lombok](https://projectlombok.org/) when analyzing Java. [#1809](https://github.com/github/codeql-action/pull/1809)
- Reduce disk space usage when downloading the CodeQL bundle. [#1820](https://github.com/github/codeql-action/pull/1820)

## 2.21.2 - 28 Jul 2023
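The disk-space warning from #1825 surfaces in the diffs below as a new disk-usage value threaded through the status-report calls. As a rough sketch only, not the Action's actual implementation, a low-disk-space check of this shape can be built on Node's `fs.promises.statfs` (Node 18.15+); the threshold, path choice, and message here are illustrative assumptions.

```typescript
// Illustrative sketch, not the CodeQL Action's implementation.
// Assumes Node >= 18.15 for fs.promises.statfs.
import { statfs } from "fs/promises";

const LOW_DISK_SPACE_THRESHOLD_GB = 2; // hypothetical threshold

async function warnIfDiskSpaceLow(
  checkPath: string = process.env.GITHUB_WORKSPACE ?? "/",
): Promise<void> {
  const stats = await statfs(checkPath);
  // Free bytes available to the current user: available blocks * block size.
  const freeGb = (stats.bavail * stats.bsize) / 1024 ** 3;
  if (freeGb < LOW_DISK_SPACE_THRESHOLD_GB) {
    // In an Actions context this would go through core.warning(); console.warn
    // keeps the sketch dependency-free.
    console.warn(
      `Only ${freeGb.toFixed(1)} GB of disk space remain on ${checkPath}; ` +
        "the code scanning run may fail if it runs out.",
    );
  }
}
```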
11  lib/actions-util.js  generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getWorkflowRunAttempt = exports.getWorkflowRunID = exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.getWorkflowEventName = exports.getActionVersion = exports.getActionsStatus = exports.getRef = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.getWorkflowRunAttempt = exports.getWorkflowRunID = exports.getUploadValue = exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.getWorkflowEventName = exports.getActionVersion = exports.getRef = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
@@ -220,15 +220,6 @@ function getRefFromEnv() {
}
return refEnv;
}
function getActionsStatus(error, otherFailureCause) {
if (error || otherFailureCause) {
return error instanceof util_1.UserError ? "user-error" : "failure";
}
else {
return "success";
}
}
exports.getActionsStatus = getActionsStatus;
function getActionVersion() {
return pkg.version;
}
File diff suppressed because one or more lines are too long
33  lib/actions-util.test.js  generated
@@ -213,37 +213,4 @@ const util_1 = require("./util");
getAdditionalInputStub.restore();
});
});
(0, ava_1.default)("createStatusReportBase", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/heads/main";
process.env["GITHUB_SHA"] = "a".repeat(40);
process.env["GITHUB_RUN_ID"] = "100";
process.env["GITHUB_RUN_ATTEMPT"] = "2";
process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld";
process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
process.env["RUNNER_OS"] = "macOS";
const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
getRequiredInput.withArgs("matrix").resolves("input/matrix");
const statusReport = await (0, api_client_1.createStatusReportBase)("init", "failure", new Date("May 19, 2023 05:19:00"), "failure cause", "exception stack trace");
t.assert(typeof statusReport.job_run_uuid === "string");
t.assert(statusReport.workflow_run_id === 100);
t.assert(statusReport.workflow_run_attempt === 2);
t.assert(statusReport.workflow_name === (process.env["GITHUB_WORKFLOW"] || ""));
t.assert(statusReport.job_name === (process.env["GITHUB_JOB"] || ""));
t.assert(statusReport.analysis_key === "analysis-key");
t.assert(statusReport.commit_oid === process.env["GITHUB_SHA"]);
t.assert(statusReport.ref === process.env["GITHUB_REF"]);
t.assert(statusReport.action_name === "init");
t.assert(statusReport.action_oid === "unknown");
t.assert(statusReport.started_at === process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT]);
t.assert(statusReport.action_started_at ===
new Date("May 19, 2023 05:19:00").toISOString());
t.assert(statusReport.status === "failure");
t.assert(statusReport.cause === "failure cause");
t.assert(statusReport.exception === "exception stack trace");
t.assert(statusReport.runner_os === process.env["RUNNER_OS"]);
t.assert(typeof statusReport.action_version === "string");
});
});
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
5  lib/analyze-action-env.test.js  generated
@@ -32,6 +32,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const api = __importStar(require("./api-client"));
const configUtils = __importStar(require("./config-utils"));
const statusReport = __importStar(require("./status-report"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
@@ -47,9 +48,9 @@ const util = __importStar(require("./util"));
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
sinon
.stub(api, "createStatusReportBase")
.stub(statusReport, "createStatusReportBase")
.resolves({});
sinon.stub(api, "sendStatusReport").resolves(true);
sinon.stub(statusReport, "sendStatusReport").resolves(true);
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
@@ -1 +1 @@
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,GAAG,EAAE,wBAAwB,CAAC;aACnC,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnD,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-env.test.js","sourceRoot":"","sources":["../src/analyze-action-env.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,8DAA8D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC/E,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAEnE,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,uEAAuE;QACvE,0EAA0E;QAC1E,iBAAiB;QACjB,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
5  lib/analyze-action-input.test.js  generated
@@ -32,6 +32,7 @@ const actionsUtil = __importStar(require("./actions-util"));
const analyze = __importStar(require("./analyze"));
const api = __importStar(require("./api-client"));
const configUtils = __importStar(require("./config-utils"));
const statusReport = __importStar(require("./status-report"));
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
(0, testing_utils_1.setupTests)(ava_1.default);
@@ -47,9 +48,9 @@ const util = __importStar(require("./util"));
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
sinon
.stub(api, "createStatusReportBase")
.stub(statusReport, "createStatusReportBase")
.resolves({});
sinon.stub(api, "sendStatusReport").resolves(true);
sinon.stub(statusReport, "sendStatusReport").resolves(true);
const gitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
@@ -1 +1 @@
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,GAAG,EAAE,wBAAwB,CAAC;aACnC,QAAQ,CAAC,EAAkC,CAAC,CAAC;QAChD,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnD,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze-action-input.test.js","sourceRoot":"","sources":["../src/analyze-action-input.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,mDAAqC;AACrC,kDAAoC;AACpC,4DAA8C;AAC9C,8DAAgD;AAChD,mDAIyB;AACzB,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,4EAA4E;AAC5E,4EAA4E;AAC5E,+EAA+E;AAC/E,+EAA+E;AAC/E,gFAAgF;AAChF,iCAAiC;AAEjC,IAAA,aAAI,EAAC,sDAAsD,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvE,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,iBAAiB,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,sCAAsC,CAAC;QAC1E,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,wBAAwB,CAAC;QACzD,KAAK;aACF,IAAI,CAAC,YAAY,EAAE,wBAAwB,CAAC;aAC5C,QAAQ,CAAC,EAAmC,CAAC,CAAC;QACjD,KAAK,CAAC,IAAI,CAAC,YAAY,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QAC5D,MAAM,aAAa,GAAuB;YACxC,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,MAAM;SAChC,CAAC;QACF,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC;YAC5C,aAAa;YACb,SAAS,EAAE,EAAE;YACb,KAAK,EAAE,EAAE;YACT,UAAU,EAAE,EAAE;SACkB,CAAC,CAAC;QACpC,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;QAC1D,iBAAiB,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC/D,MAAM,iBAAiB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACtE,iBAAiB,CAAC,QAAQ,CAAC,eAAe,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,CAAC;QAC5D,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,0BAA0B,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnE,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,IAAA,0CAA0B,EAAC,GAAG,EAAE,EAAE,CAAC,CAAC;QAEpC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAC,GAAG,GAAG,CAAC;QACpC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC;QAEnC,4DAA4D;QAC5D,iBAAiB,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACpD,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,eAAe,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QAC3D,MAAM,cAAc,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACzD,MAAM,aAAa,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElD,uEAAuE;QACvE,oEAAoE;QACpE,4EAA4E;QAC5E,wEAAwE;QACxE,MAAM,aAAa,CAAC,UAAU,CAAC;QAE/B,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC/D,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;QAC7D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;QAC9D,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
20  lib/analyze-action.js  generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.runPromise = exports.sendStatusReport = void 0;
exports.runPromise = void 0;
const fs = __importStar(require("fs"));
const path_1 = __importDefault(require("path"));
const perf_hooks_1 = require("perf_hooks");
@@ -34,7 +34,6 @@ const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");
const api = __importStar(require("./api-client"));
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
@@ -44,14 +43,16 @@ const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const statusReport = __importStar(require("./status-report"));
const status_report_1 = require("./status-report");
const trap_caching_1 = require("./trap-caching");
const uploadLib = __importStar(require("./upload-lib"));
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger) {
const status = actionsUtil.getActionsStatus(error, stats?.analyze_failure_language);
const statusReportBase = await api.createStatusReportBase("finish", status, startedAt, error?.message, error?.stack);
const statusReport = {
const status = (0, status_report_1.getActionsStatus)(error, stats?.analyze_failure_language);
const statusReportBase = await (0, status_report_1.createStatusReportBase)("finish", status, startedAt, await util.checkDiskUsage(), error?.message, error?.stack);
const report = {
...statusReportBase,
...(config
? {
@@ -63,17 +64,16 @@ async function sendStatusReport(startedAt, config, stats, error, trapCacheUpload
};
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport = {
...statusReport,
...report,
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
trap_cache_upload_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
};
await api.sendStatusReport(trapCacheUploadStatusReport);
await statusReport.sendStatusReport(trapCacheUploadStatusReport);
}
else {
await api.sendStatusReport(statusReport);
await statusReport.sendStatusReport(report);
}
}
exports.sendStatusReport = sendStatusReport;
// `expect-error` should only be set to a non-false value by the CodeQL Action PR checks.
function hasBadExpectErrorInput() {
return (actionsUtil.getOptionalInput("expect-error") !== "false" &&
@@ -146,7 +146,7 @@ async function run() {
util.initializeEnvironment(actionsUtil.getActionVersion());
const logger = (0, logging_1.getActionsLogger)();
try {
if (!(await api.sendStatusReport(await api.createStatusReportBase("finish", "starting", startedAt)))) {
if (!(await statusReport.sendStatusReport(await (0, status_report_1.createStatusReportBase)("finish", "starting", startedAt, await util.checkDiskUsage(logger))))) {
return;
}
config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
File diff suppressed because one or more lines are too long
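Read together, the hunks above move `createStatusReportBase`, `sendStatusReport`, and `getActionsStatus` out of `api-client`/`actions-util` into a `status-report` module and add a disk-usage argument to `createStatusReportBase`. A minimal sketch of the new call shape as inferred from this diff; the import paths and exact parameter list are read off the hunks rather than taken from a documented API.

```typescript
// Sketch only, inferred from the diff above; not the Action's published API.
import * as statusReport from "./status-report"; // assumed module location
import * as util from "./util";

async function reportFinishStatus(startedAt: Date, error?: Error): Promise<void> {
  // Map an optional error to "success" / "failure" / "user-error".
  const status = statusReport.getActionsStatus(error);
  const base = await statusReport.createStatusReportBase(
    "finish",                    // action name
    status,
    startedAt,
    await util.checkDiskUsage(), // the new argument threaded through this refactor
    error?.message,
    error?.stack,
  );
  await statusReport.sendStatusReport(base);
}
```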
2  lib/analyze.js  generated
@@ -232,7 +232,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
}
statusReport["event_reports"].push(perQueryAlertCountEventReport);
}
if (!(await features.getValue(feature_flags_1.Feature.NewAnalysisSummaryEnabled, codeql))) {
if (!(await features.getValue(feature_flags_1.Feature.AnalysisSummaryV2Enabled, codeql))) {
await runPrintLinesOfCode(language);
}
}
File diff suppressed because one or more lines are too long
157  lib/api-client.js  generated
@@ -26,14 +26,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRelativePath = exports.sendStatusReport = exports.createStatusReportBase = exports.getGitHubVersion = exports.getGitHubVersionFromApi = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
const os = __importStar(require("os"));
exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRelativePath = exports.getGitHubVersion = exports.getGitHubVersionFromApi = exports.getApiClientWithExternalAuth = exports.getApiClient = exports.getApiDetails = exports.DisallowedAPIVersionReason = void 0;
const core = __importStar(require("@actions/core"));
const githubUtils = __importStar(require("@actions/github/lib/utils"));
const retry = __importStar(require("@octokit/plugin-retry"));
const console_log_level_1 = __importDefault(require("console-log-level"));
const actions_util_1 = require("./actions-util");
const environment_1 = require("./environment");
const util_1 = require("./util");
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
var DisallowedAPIVersionReason;
@@ -104,159 +102,6 @@ async function getGitHubVersion() {
return cachedGitHubVersion;
}
exports.getGitHubVersion = getGitHubVersion;
/**
* Compose a StatusReport.
*
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
* @param status The status. Must be 'success', 'failure', or 'starting'
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
*/
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
const ref = await (0, actions_util_1.getRef)();
const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(environment_1.EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
const actionRef = process.env["GITHUB_ACTION_REF"];
const testingEnvironment = process.env[environment_1.EnvVar.TESTING_ENVIRONMENT] || "";
// re-export the testing environment variable so that it is available to subsequent steps,
// even if it was only set for this step
if (testingEnvironment !== "") {
core.exportVariable(environment_1.EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
}
const statusReport = {
job_run_uuid: jobRunUUID,
workflow_run_id: workflowRunID,
workflow_run_attempt: workflowRunAttempt,
workflow_name: workflowName,
job_name: jobName,
analysis_key,
commit_oid: commitOid,
ref,
action_name: actionName,
action_ref: actionRef,
action_oid: "unknown",
started_at: workflowStartedAt,
action_started_at: actionStartedAt.toISOString(),
status,
testing_environment: testingEnvironment,
runner_os: runnerOs,
action_version: (0, actions_util_1.getActionVersion)(),
};
// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error") {
statusReport.completed_at = new Date().toISOString();
}
const matrix = (0, actions_util_1.getRequiredInput)("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion;
}
return statusReport;
}
exports.createStatusReportBase = createStatusReportBase;
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
/**
* Send a status report to the code_scanning/analysis/status endpoint.
*
* Optionally checks the response from the API endpoint and sets the action
* as failed if the status report failed. This is only expected to be used
* when sending a 'starting' report.
*
* Returns whether sending the status report was successful of not.
*/
async function sendStatusReport(statusReport) {
const statusReportJSON = JSON.stringify(statusReport);
core.debug(`Sending status report: ${statusReportJSON}`);
// If in test mode we don't want to upload the results
if ((0, util_1.isInTestMode)()) {
core.debug("In test mode. Status reports are not uploaded.");
return true;
}
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
const [owner, repo] = nwo.split("/");
const client = getApiClient();
try {
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
owner,
repo,
data: statusReportJSON,
});
return true;
}
catch (e) {
console.log(e);
if ((0, util_1.isHTTPError)(e)) {
switch (e.status) {
case 403:
if ((0, actions_util_1.getWorkflowEventName)() === "push" &&
process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
"Uploading Code Scanning results requires write access. " +
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
}
else {
core.setFailed(e.message || GENERIC_403_MSG);
}
return false;
case 404:
core.setFailed(GENERIC_404_MSG);
return false;
case 422:
// schema incompatibility when reporting status
// this means that this action version is no longer compatible with the API
// we still want to continue as it is likely the analysis endpoint will work
if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
core.debug(INCOMPATIBLE_MSG);
}
else {
core.debug(OUT_OF_DATE_MSG);
}
return true;
}
}
// something else has gone wrong and the request/response will be logged by octokit
// it's possible this is a transient error and we should continue scanning
core.error("An unexpected error occurred when sending code scanning status report.");
return true;
}
}
exports.sendStatusReport = sendStatusReport;
/**
* Get the path of the currently executing workflow relative to the repository root.
*/
File diff suppressed because one or more lines are too long
15  lib/autobuild-action.js  generated
@@ -31,17 +31,18 @@ const configUtils = __importStar(require("./config-utils"));
const environment_1 = require("./environment");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const status_report_1 = require("./status-report");
const util_1 = require("./util");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
async function sendCompletedStatusReport(logger, startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)((0, actions_util_1.getActionVersion)());
const status = (0, actions_util_1.getActionsStatus)(cause, failingLanguage);
const statusReportBase = await (0, api_client_1.createStatusReportBase)("autobuild", status, startedAt, cause?.message, cause?.stack);
const status = (0, status_report_1.getActionsStatus)(cause, failingLanguage);
const statusReportBase = await (0, status_report_1.createStatusReportBase)("autobuild", status, startedAt, await (0, util_1.checkDiskUsage)(logger), cause?.message, cause?.stack);
const statusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await (0, api_client_1.sendStatusReport)(statusReport);
await (0, status_report_1.sendStatusReport)(statusReport);
}
async function run() {
const startedAt = new Date();
@@ -49,7 +50,7 @@ async function run() {
let currentLanguage = undefined;
let languages = undefined;
try {
if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("autobuild", "starting", startedAt)))) {
if (!(await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("autobuild", "starting", startedAt, await (0, util_1.checkDiskUsage)(logger))))) {
return;
}
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
@@ -77,10 +78,10 @@ async function run() {
catch (unwrappedError) {
const error = (0, util_1.wrapError)(unwrappedError);
core.setFailed(`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`);
await sendCompletedStatusReport(startedAt, languages ?? [], currentLanguage, error);
await sendCompletedStatusReport(logger, startedAt, languages ?? [], currentLanguage, error);
return;
}
await sendCompletedStatusReport(startedAt, languages ?? []);
await sendCompletedStatusReport(logger, startedAt, languages ?? []);
}
async function runWrapper() {
try {
@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAMwB;AACxB,6CAIsB;AACtB,2CAAwE;AACxE,4DAA8C;AAC9C,+CAAuC;AACvC,2CAAuC;AACvC,uCAA6C;AAC7C,iCAIgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;iBAC1D;aACF;SACF;KACF;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KACxE;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,+CAAuC;AACvC,2CAAuC;AACvC,uCAAqD;AACrD,mDAKyB;AACzB,iCAKgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,MAAc,EACd,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,gCAAgB,EACtB,MAAM,IAAA,sCAAsB,EAC1B,WAAW,EACX,UAAU,EACV,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,CAC7B,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,oBAAoB,EAAE,MAAM,CAAC,CAAC;iBAC1D;aACF;SACF;KACF;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CACZ,kIAAkI,KAAK,CAAC,OAAO,EAAE,CAClJ,CAAC;QACF,MAAM,yBAAyB,CAC7B,MAAM,EACN,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,CACN,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AACtE,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KACxE;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
8  lib/codeql.js  generated
@@ -278,6 +278,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_INIT_WITH_QLCONFIG))) {
extraArgs.push(`--qlconfig-file=${qlconfigFile}`);
}
if (await features.getValue(feature_flags_1.Feature.LanguageBaselineConfigEnabled, this)) {
extraArgs.push("--calculate-language-specific-baseline");
}
await runTool(cmd, [
"database",
"init",
@@ -485,10 +488,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
else if (await util.codeQlVersionAbove(this, "2.12.4")) {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (await features.getValue(feature_flags_1.Feature.NewAnalysisSummaryEnabled, this)) {
if (await features.getValue(feature_flags_1.Feature.AnalysisSummaryV2Enabled, this)) {
codeqlArgs.push("--new-analysis-summary");
}
else if (await util.codeQlVersionAbove(this, feature_flags_1.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)) {
else if (await util.codeQlVersionAbove(this, feature_flags_1.CODEQL_VERSION_ANALYSIS_SUMMARY_V2)) {
codeqlArgs.push("--no-new-analysis-summary");
}
codeqlArgs.push(databasePath);
@@ -729,6 +732,7 @@ const maxErrorSize = 20000;
async function runTool(cmd, args = [], opts = {}) {
let output = "";
let error = "";
process.stdout.write(`[command]${cmd} ${args.join(" ")}\n`);
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
ignoreReturnCode: true,
listeners: {
File diff suppressed because one or more lines are too long
17  lib/codeql.test.js  generated
@@ -131,6 +131,21 @@ function mockApiDetails(apiDetails) {
t.is(toolcache.findAllVersions("CodeQL").length, 2);
});
});
(0, ava_1.default)("caches semantically versioned bundles using their semantic version number", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const url = (0, testing_utils_1.mockBundleDownloadApi)({
tagName: `codeql-bundle-v2.14.0`,
isPinned: false,
});
const result = await codeql.setupCodeQL(url, testing_utils_1.SAMPLE_DOTCOM_API_DETAILS, tmpDir, util.GitHubVariant.DOTCOM, testing_utils_1.SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.14.0`));
t.is(result.toolsVersion, `2.14.0`);
t.is(result.toolsSource, setup_codeql_1.ToolsSource.Download);
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
});
});
(0, ava_1.default)("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
await util.withTmpDir(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
@@ -699,7 +714,7 @@ for (const { featureEnabled, codeqlVersion, flagPassed, negativeFlagPassed, } of
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)(featureEnabled ? [feature_flags_1.Feature.NewAnalysisSummaryEnabled] : []), (0, logging_1.getRunnerLogger)(true));
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)(featureEnabled ? [feature_flags_1.Feature.AnalysisSummaryV2Enabled] : []), (0, logging_1.getRunnerLogger)(true));
t.is(runnerConstructorStub.firstCall.args[1].includes("--new-analysis-summary"), flagPassed, `--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`);
t.is(runnerConstructorStub.firstCall.args[1].includes("--no-new-analysis-summary"), negativeFlagPassed, `--no-new-analysis-summary should${negativeFlagPassed ? "" : "n't"} be passed`);
});
File diff suppressed because one or more lines are too long
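The new test above exercises the behaviour from #1832: bundles tagged `codeql-bundle-vX.Y.Z` are cached in the Actions tool cache under the plain `X.Y.Z` version. A tiny helper of this shape would perform that mapping; it is assumed for illustration and is not the Action's actual code.

```typescript
// Illustrative only: derive the simpler tool-cache version from a
// semantically versioned bundle tag, e.g. "codeql-bundle-v2.14.0" -> "2.14.0".
function toolcacheVersionFromTag(tagName: string): string | undefined {
  const match = tagName.match(/^codeql-bundle-v(\d+\.\d+\.\d+)$/);
  return match?.[1];
}
```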
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.14.1",
"cliVersion": "2.14.1",
"priorBundleVersion": "codeql-bundle-v2.14.0",
"priorCliVersion": "2.14.0"
"bundleVersion": "codeql-bundle-v2.14.2",
"cliVersion": "2.14.2",
"priorBundleVersion": "codeql-bundle-v2.14.1",
"priorCliVersion": "2.14.1"
}
29  lib/environment.js  generated
@@ -3,42 +3,37 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.EnvVar = void 0;
var EnvVar;
(function (EnvVar) {
/** Set to true when the `analyze` Action completes successfully. */
/** Whether the `analyze` Action completes successfully. */
EnvVar["ANALYZE_DID_COMPLETE_SUCCESSFULLY"] = "CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY";
/** Set to "true" when the CodeQL Action has invoked the Go autobuilder. */
/** Whether the CodeQL Action has invoked the Go autobuilder. */
EnvVar["DID_AUTOBUILD_GOLANG"] = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG";
/**
* Used to disable the SARIF post-processing in the Action that removes duplicate locations from
* Whether to disable the SARIF post-processing in the Action that removes duplicate locations from
* notifications in the `run[].invocations[].toolExecutionNotifications` SARIF property.
*/
EnvVar["DISABLE_DUPLICATE_LOCATION_FIX"] = "CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX";
/**
* If set to the "true" string, then the CodeQL Action is using its
* own deprecated and non-standard way of scanning for multiple
* languages.
* Whether the CodeQL Action is using its own deprecated and non-standard way of scanning for
* multiple languages.
*/
EnvVar["FEATURE_MULTI_LANGUAGE"] = "CODEQL_ACTION_FEATURE_MULTI_LANGUAGE";
/**
* If set to the "true" string, then the CodeQL Action is using its
* own sandwiched workflow mechanism.
*/
/** Whether the CodeQL Action is using its own sandwiched workflow mechanism. */
EnvVar["FEATURE_SANDWICH"] = "CODEQL_ACTION_FEATURE_SANDWICH";
/**
* If set to a truthy value, then the CodeQL Action might combine SARIF
* output from several `interpret-results` runs for the same language.
* Whether the CodeQL Action might combine SARIF output from several `interpret-results` runs for
* the same language.
*/
EnvVar["FEATURE_SARIF_COMBINE"] = "CODEQL_ACTION_FEATURE_SARIF_COMBINE";
/**
* If set to the "true" string, then the CodeQL Action will upload SARIF,
* not the CLI.
*/
/** Whether the CodeQL Action will upload SARIF, not the CLI. */
EnvVar["FEATURE_WILL_UPLOAD"] = "CODEQL_ACTION_FEATURE_WILL_UPLOAD";
/** Whether the CodeQL Action has already warned the user about low disk space. */
EnvVar["HAS_WARNED_ABOUT_DISK_SPACE"] = "CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE";
/** UUID representing the current job run. */
EnvVar["JOB_RUN_UUID"] = "JOB_RUN_UUID";
EnvVar["ODASA_TRACER_CONFIGURATION"] = "ODASA_TRACER_CONFIGURATION";
/** Whether to suppress the warning if the current CLI will soon be unsupported. */
EnvVar["SUPPRESS_DEPRECATED_SOON_WARNING"] = "CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING";
/** Used to disable uploading SARIF results or status reports to the GitHub API */
/** Whether to disable uploading SARIF results or status reports to the GitHub API */
EnvVar["TEST_MODE"] = "CODEQL_ACTION_TEST_MODE";
EnvVar["TESTING_ENVIRONMENT"] = "CODEQL_ACTION_TESTING_ENVIRONMENT";
/** Semver of the CodeQL Action as specified in `package.json`. */
@@ -1 +1 @@
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA,IAAY,MA8DX;AA9DD,WAAY,MAAM;IAChB,oEAAoE;IACpE,+FAAqF,CAAA;IAErF,2EAA2E;IAC3E,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;;OAIG;IACH,yEAA+D,CAAA;IAE/D;;;OAGG;IACH,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D;;;OAGG;IACH,mEAAyD,CAAA;IAEzD,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,mEAAyD,CAAA;IAEzD,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,kFAAkF;IAClF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;AACpD,CAAC,EA9DW,MAAM,sBAAN,MAAM,QA8DjB"}
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA,IAAY,MA0DX;AA1DD,WAAY,MAAM;IAChB,2DAA2D;IAC3D,+FAAqF,CAAA;IAErF,gEAAgE;IAChE,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;OAGG;IACH,yEAA+D,CAAA;IAE/D,gFAAgF;IAChF,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D,gEAAgE;IAChE,mEAAyD,CAAA;IAEzD,kFAAkF;IAClF,mFAAyE,CAAA;IAEzE,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,mEAAyD,CAAA;IAEzD,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,qFAAqF;IACrF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;AACpD,CAAC,EA1DW,MAAM,sBAAN,MAAM,QA0DjB"}
26  lib/feature-flags.js  generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = exports.CODEQL_VERSION_INTRA_LAYER_PARALLELISM = exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = void 0;
exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.Features = exports.FEATURE_FLAGS_FILE_NAME = exports.featureConfig = exports.Feature = exports.CODEQL_VERSION_LANGUAGE_BASELINE_CONFIG = exports.CODEQL_VERSION_INTRA_LAYER_PARALLELISM = exports.CODEQL_VERSION_ANALYSIS_SUMMARY_V2 = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const semver = __importStar(require("semver"));
@@ -39,11 +39,15 @@ exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
/**
* Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
*/
exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
exports.CODEQL_VERSION_ANALYSIS_SUMMARY_V2 = "2.14.0";
/**
* Versions 2.14.0+ of the CodeQL CLI support intra-layer parallelism (aka fine-grained parallelism) options.
*/
exports.CODEQL_VERSION_INTRA_LAYER_PARALLELISM = "2.14.0";
/**
* Versions 2.14.2+ of the CodeQL CLI support language-specific baseline configuration.
*/
exports.CODEQL_VERSION_LANGUAGE_BASELINE_CONFIG = "2.14.2";
/**
* Feature enablement as returned by the GitHub API endpoint.
*
@@ -51,19 +55,25 @@ exports.CODEQL_VERSION_INTRA_LAYER_PARALLELISM = "2.14.0";
*/
var Feature;
(function (Feature) {
Feature["AnalysisSummaryV2Enabled"] = "analysis_summary_v2_enabled";
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
Feature["CodeqlJavaLombokEnabled"] = "codeql_java_lombok_enabled";
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
Feature["DisablePythonDependencyInstallationEnabled"] = "disable_python_dependency_installation_enabled";
Feature["EvaluatorIntraLayerParallelismEnabled"] = "evaluator_intra_layer_parallelism_enabled";
Feature["ExportDiagnosticsEnabled"] = "export_diagnostics_enabled";
Feature["LanguageBaselineConfigEnabled"] = "language_baseline_config_enabled";
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
Feature["NewAnalysisSummaryEnabled"] = "new_analysis_summary_enabled";
Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
Feature["ScalingReservedRamEnabled"] = "scaling_reserved_ram_enabled";
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
[Feature.AnalysisSummaryV2Enabled]: {
envVar: "CODEQL_ACTION_ANALYSIS_SUMMARY_V2",
minimumVersion: exports.CODEQL_VERSION_ANALYSIS_SUMMARY_V2,
defaultValue: false,
},
[Feature.CodeqlJavaLombokEnabled]: {
envVar: "CODEQL_JAVA_LOMBOK",
minimumVersion: "2.14.0",
@@ -89,16 +99,16 @@ exports.featureConfig = {
minimumVersion: "2.12.4",
defaultValue: true,
},
[Feature.LanguageBaselineConfigEnabled]: {
envVar: "CODEQL_ACTION_LANGUAGE_BASELINE_CONFIG",
minimumVersion: exports.CODEQL_VERSION_LANGUAGE_BASELINE_CONFIG,
defaultValue: false,
},
[Feature.MlPoweredQueriesEnabled]: {
envVar: "CODEQL_ML_POWERED_QUERIES",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.NewAnalysisSummaryEnabled]: {
envVar: "CODEQL_ACTION_NEW_ANALYSIS_SUMMARY",
minimumVersion: exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
defaultValue: false,
},
[Feature.QaTelemetryEnabled]: {
envVar: "CODEQL_ACTION_QA_TELEMETRY",
minimumVersion: undefined,
File diff suppressed because one or more lines are too long
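Each `featureConfig` entry above pairs a feature with an override environment variable, an optional minimum CodeQL CLI version, and a default value. The evaluation logic is not part of this diff; the following is one plausible sketch of how such an entry could be consulted, with the function name and the precedence rules assumed for illustration.

```typescript
// Sketch of one way a featureConfig entry could be evaluated; this is an
// assumption for illustration, not the Action's actual Features implementation.
import * as semver from "semver";

interface FeatureSpec {
  envVar: string;                     // per-feature override variable
  minimumVersion: string | undefined; // minimum CodeQL CLI version, if any
  defaultValue: boolean;              // value when no override or API answer applies
}

function evaluateFeature(
  spec: FeatureSpec,
  codeqlVersion: string,
  apiValue: boolean | undefined,
): boolean {
  const override = process.env[spec.envVar];
  if (override === "true") return true;   // explicit opt-in
  if (override === "false") return false; // explicit opt-out
  if (spec.minimumVersion && semver.lt(codeqlVersion, spec.minimumVersion)) {
    return false;                         // CLI too old for this feature
  }
  return apiValue ?? spec.defaultValue;   // fall back to the API value, then the default
}
```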
7  lib/init-action-post.js  generated
@@ -36,6 +36,7 @@ const feature_flags_1 = require("./feature-flags");
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const status_report_1 = require("./status-report");
const util_1 = require("./util");
async function runWrapper() {
const startedAt = new Date();
@@ -51,15 +52,15 @@ async function runWrapper() {
catch (unwrappedError) {
const error = (0, util_1.wrapError)(unwrappedError);
core.setFailed(error.message);
await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init-post", (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("init-post", (0, status_report_1.getActionsStatus)(error), startedAt, await (0, util_1.checkDiskUsage)(), error.message, error.stack));
return;
}
const statusReportBase = await (0, api_client_1.createStatusReportBase)("init-post", "success", startedAt);
const statusReportBase = await (0, status_report_1.createStatusReportBase)("init-post", "success", startedAt, await (0, util_1.checkDiskUsage)());
const statusReport = {
...statusReportBase,
...uploadFailedSarifResult,
};
await (0, api_client_1.sendStatusReport)(statusReport);
await (0, status_report_1.sendStatusReport)(statusReport);
}
void runWrapper();
//# sourceMappingURL=init-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAKwB;AACxB,6CAIsB;AACtB,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAIgB;AAMhB,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,iDAAuE;AACvE,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,mDAKyB;AACzB,iCAKgB;AAMhB,KAAK,UAAU,UAAU;IACvB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,uBAES,CAAC;IACd,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,IAAA,oCAAqB,GAAE,EACvB,MAAM,CACP,CAAC;QAEF,uBAAuB,GAAG,MAAM,oBAAoB,CAAC,GAAG,CACtD,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,6BAAc,EACd,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAE9B,MAAM,IAAA,gCAAgB,EACpB,MAAM,IAAA,sCAAsB,EAC1B,WAAW,EACX,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,EACtB,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;IACD,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,WAAW,EACX,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,CACvB,CAAC;IACF,MAAM,YAAY,GAAyB;QACzC,GAAG,gBAAgB;QACnB,GAAG,uBAAuB;KAC3B,CAAC;IACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
11 lib/init-action.js generated
@@ -36,11 +36,12 @@ const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const setup_codeql_1 = require("./setup-codeql");
|
||||
const status_report_1 = require("./status-report");
|
||||
const trap_caching_1 = require("./trap-caching");
|
||||
const util_1 = require("./util");
|
||||
const workflow_1 = require("./workflow");
|
||||
async function sendCompletedStatusReport(startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger, error) {
|
||||
const statusReportBase = await (0, api_client_1.createStatusReportBase)("init", (0, actions_util_1.getActionsStatus)(error), startedAt, error?.message, error?.stack);
|
||||
const statusReportBase = await (0, status_report_1.createStatusReportBase)("init", (0, status_report_1.getActionsStatus)(error), startedAt, await (0, util_1.checkDiskUsage)(logger), error?.message, error?.stack);
|
||||
const workflowLanguages = (0, actions_util_1.getOptionalInput)("languages");
|
||||
const initStatusReport = {
|
||||
...statusReportBase,
|
||||
@@ -88,13 +89,13 @@ async function sendCompletedStatusReport(startedAt, config, toolsDownloadDuratio
|
||||
trap_cache_download_size_bytes: Math.round(await (0, trap_caching_1.getTotalCacheSize)(config.trapCaches, logger)),
|
||||
trap_cache_download_duration_ms: Math.round(config.trapCacheDownloadTime),
|
||||
};
|
||||
await (0, api_client_1.sendStatusReport)({
|
||||
await (0, status_report_1.sendStatusReport)({
|
||||
...initWithConfigStatusReport,
|
||||
...initToolsDownloadFields,
|
||||
});
|
||||
}
|
||||
else {
|
||||
await (0, api_client_1.sendStatusReport)({ ...initStatusReport, ...initToolsDownloadFields });
|
||||
await (0, status_report_1.sendStatusReport)({ ...initStatusReport, ...initToolsDownloadFields });
|
||||
}
|
||||
}
|
||||
async function run() {
|
||||
@@ -121,7 +122,7 @@ async function run() {
|
||||
core.exportVariable(environment_1.EnvVar.JOB_RUN_UUID, (0, uuid_1.v4)());
|
||||
try {
|
||||
const workflowErrors = await (0, workflow_1.validateWorkflow)(logger);
|
||||
if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||
if (!(await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("init", "starting", startedAt, await (0, util_1.checkDiskUsage)(logger), workflowErrors)))) {
|
||||
return;
|
||||
}
|
||||
const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(gitHubVersion.type);
|
||||
@@ -156,7 +157,7 @@ async function run() {
|
||||
catch (unwrappedError) {
|
||||
const error = (0, util_1.wrapError)(unwrappedError);
|
||||
core.setFailed(error.message);
|
||||
await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("init", error instanceof util_1.UserError ? "user-error" : "aborted", startedAt, error.message, error.stack));
|
||||
await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("init", error instanceof util_1.UserError ? "user-error" : "aborted", startedAt, await (0, util_1.checkDiskUsage)(), error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
|
||||
File diff suppressed because one or more lines are too long
7 lib/resolve-environment-action.js generated
@@ -31,6 +31,7 @@ const configUtils = __importStar(require("./config-utils"));
|
||||
const languages_1 = require("./languages");
|
||||
const logging_1 = require("./logging");
|
||||
const resolve_environment_1 = require("./resolve-environment");
|
||||
const status_report_1 = require("./status-report");
|
||||
const util_1 = require("./util");
|
||||
const ACTION_NAME = "resolve-environment";
|
||||
const ENVIRONMENT_OUTPUT_NAME = "environment";
|
||||
@@ -39,7 +40,7 @@ async function run() {
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
const language = (0, languages_1.resolveAlias)((0, actions_util_1.getRequiredInput)("language"));
|
||||
try {
|
||||
if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, "starting", startedAt)))) {
|
||||
if (!(await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)(ACTION_NAME, "starting", startedAt, await (0, util_1.checkDiskUsage)(logger))))) {
|
||||
return;
|
||||
}
|
||||
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
||||
@@ -63,11 +64,11 @@ async function run() {
|
||||
else {
|
||||
// For any other error type, something more serious has gone wrong and we fail.
|
||||
core.setFailed(`Failed to resolve a build environment suitable for automatically building your code. ${error.message}`);
|
||||
await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, (0, actions_util_1.getActionsStatus)(error), startedAt, error.message, error.stack));
|
||||
await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)(ACTION_NAME, (0, status_report_1.getActionsStatus)(error), startedAt, await (0, util_1.checkDiskUsage)(), error.message, error.stack));
|
||||
}
|
||||
return;
|
||||
}
|
||||
await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)(ACTION_NAME, "success", startedAt));
|
||||
await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)(ACTION_NAME, "success", startedAt, await (0, util_1.checkDiskUsage)()));
|
||||
}
|
||||
async function runWrapper() {
|
||||
try {
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAIsB;AACtB,qCAAkD;AAClD,4DAA8C;AAC9C,2CAAqD;AACrD,uCAA6C;AAC7C,+DAAmE;AACnE,iCAA+E;AAE/E,MAAM,WAAW,GAAG,qBAAqB,CAAC;AAC1C,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,QAAQ,GAAa,IAAA,wBAAY,EAAC,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAAC,CAAC;IAEtE,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,QAAQ,CACT,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;KACjD;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,+BAAsB,EAAE;YAC3C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;SACH;aAAM;YACL,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,WAAW,EACX,IAAA,+BAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;SACH;QAED,OAAO;KACR;IAED,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAAC,WAAW,EAAE,SAAS,EAAE,SAAS,CAAC,CAChE,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,GAAG,WAAW,mBAAmB,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC7E;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAIwB;AACxB,6CAAgD;AAChD,qCAAkD;AAClD,4DAA8C;AAC9C,2CAAqD;AACrD,uCAA6C;AAC7C,+DAAmE;AACnE,mDAIyB;AACzB,iCAKgB;AAEhB,MAAM,WAAW,GAAG,qBAAqB,CAAC;AAC1C,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,QAAQ,GAAa,IAAA,wBAAY,EAAC,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAAC,CAAC;IAEtE,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,gCAAgB,EACtB,MAAM,IAAA,sCAAsB,EAC1B,WAAW,EACX,UAAU,EACV,SAAS,EACT,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,CAC7B,CACF,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,QAAQ,CACT,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;KACjD;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,+BAAsB,EAAE;YAC3C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;SACH;aAAM;YACL,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,IAAA,gCAAgB,EACpB,MAAM,IAAA,sCAAsB,EAC1B,WAAW,EACX,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,EACtB,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;SACH;QAED,OAAO;KACR;IAED,MAAM,IAAA,gCAAgB,EACpB,MAAM,IAAA,sCAAsB,EAC1B,WAAW,EACX,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,CACvB,CACF,CAAC;AACJ,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,GAAG,WAAW,mBAAmB,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;KAC7E;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
41 lib/setup-codeql.js generated
@@ -41,6 +41,7 @@ const api = __importStar(require("./api-client"));
|
||||
// creation scripts. Ensure that any changes to the format of this file are compatible with both of
|
||||
// these dependents.
|
||||
const defaults = __importStar(require("./defaults.json"));
|
||||
const feature_flags_1 = require("./feature-flags");
|
||||
const util = __importStar(require("./util"));
|
||||
const util_1 = require("./util");
|
||||
var ToolsSource;
|
||||
@@ -446,19 +447,8 @@ async function downloadCodeQL(codeqlURL, maybeBundleVersion, maybeCliVersion, ap
|
||||
codeqlURL.includes(`/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/`)) {
|
||||
maybeCliVersion = await tryFindCliVersionDotcomOnly(`codeql-bundle-${bundleVersion}`, logger);
|
||||
}
|
||||
// Include both the CLI version and the bundle version in the toolcache version number. That way
|
||||
// if the user requests the same URL again, we can get it from the cache without having to call
|
||||
// any of the Releases API.
|
||||
//
|
||||
// Special case: If the CLI version is a pre-release or contains build metadata, then cache the
|
||||
// bundle as `0.0.0-<bundleVersion>` to avoid the bundle being interpreted as containing a stable
|
||||
// CLI release. In principle, it should be enough to just check that the CLI version isn't a
|
||||
// pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
|
||||
// and we don't want these nightlies to override stable CLI versions in the toolcache.
|
||||
const toolcacheVersion = maybeCliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
|
||||
? `${maybeCliVersion}-${bundleVersion}`
|
||||
: convertToSemVer(bundleVersion, logger);
|
||||
logger.debug("Caching CodeQL bundle.");
|
||||
const toolcacheVersion = getCanonicalToolcacheVersion(maybeCliVersion, bundleVersion, logger);
|
||||
const toolcachedBundlePath = await toolcache.cacheDir(extractedBundlePath, "CodeQL", toolcacheVersion);
|
||||
// Defensive check: we expect `cacheDir` to copy the bundle to a new location.
|
||||
if (toolcachedBundlePath !== extractedBundlePath) {
|
||||
@@ -479,6 +469,33 @@ function getCodeQLURLVersion(url) {
|
||||
return match[1];
|
||||
}
|
||||
exports.getCodeQLURLVersion = getCodeQLURLVersion;
|
||||
/**
|
||||
* Returns the toolcache version number to use to store the bundle with the associated CLI version
|
||||
* and bundle version.
|
||||
*
|
||||
* This is the canonical version number, since toolcaches populated by different versions of the
|
||||
* CodeQL Action or different runner image creation scripts may store the bundle using a different
|
||||
* version number. Functions like `getCodeQLSource` that fetch the bundle from rather than save the
|
||||
* bundle to the toolcache should handle these different version numbers.
|
||||
*/
|
||||
function getCanonicalToolcacheVersion(cliVersion, bundleVersion, logger) {
|
||||
// If the CLI version is a pre-release or contains build metadata, then cache the
|
||||
// bundle as `0.0.0-<bundleVersion>` to avoid the bundle being interpreted as containing a stable
|
||||
// CLI release. In principle, it should be enough to just check that the CLI version isn't a
|
||||
// pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
|
||||
// and we don't want these nightlies to override stable CLI versions in the toolcache.
|
||||
if (!cliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) {
|
||||
return convertToSemVer(bundleVersion, logger);
|
||||
}
|
||||
// If the bundle is semantically versioned, it can be looked up based on just the CLI version
|
||||
// number, so version it in the toolcache using just the CLI version number.
|
||||
if (semver.gte(cliVersion, feature_flags_1.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED)) {
|
||||
return cliVersion;
|
||||
}
|
||||
// Include both the CLI version and the bundle version in the toolcache version number. That way
|
||||
// we can find the bundle in the toolcache based on either the CLI version or the bundle version.
|
||||
return `${cliVersion}-${bundleVersion}`;
|
||||
}
|
||||
/**
|
||||
* Obtains the CodeQL bundle, installs it in the toolcache if appropriate, and extracts it.
|
||||
*
|
||||
|
||||
File diff suppressed because one or more lines are too long
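The new getCanonicalToolcacheVersion helper above distinguishes three cases when choosing the version string under which to cache a bundle. Below is a simplified re-statement for illustration: the cutoff constant's value, the `0.0.0-` fallback standing in for convertToSemVer, and the exact shape of the bundle version strings are assumptions, and the real helper also receives a logger.

```ts
import * as semver from "semver";

// Simplified re-statement of getCanonicalToolcacheVersion, for illustration only.
const SEMANTICALLY_VERSIONED_CUTOFF = "2.13.4"; // assumed value of the imported constant

function canonicalToolcacheVersion(
  cliVersion: string | undefined,
  bundleVersion: string,
): string {
  // Pre-release or nightly CLIs (e.g. "2.14.0+20230810") must never shadow a stable
  // release in the toolcache, so fall back to a 0.0.0-prefixed bundle version.
  if (!cliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) {
    return `0.0.0-${bundleVersion}`;
  }
  // Semantically versioned bundles can be looked up by the CLI version alone.
  if (semver.gte(cliVersion, SEMANTICALLY_VERSIONED_CUTOFF)) {
    return cliVersion;
  }
  // Older date-based bundles keep both identifiers so either one finds the entry.
  return `${cliVersion}-${bundleVersion}`;
}

// canonicalToolcacheVersion(undefined, "20230428")  // "0.0.0-20230428"
// canonicalToolcacheVersion("2.14.2", "v2.14.2")    // "2.14.2"
// canonicalToolcacheVersion("2.12.6", "20230428")   // "2.12.6-20230428"
```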
201 lib/status-report.js generated Normal file
@@ -0,0 +1,201 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = void 0;
|
||||
const os = __importStar(require("os"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api_client_1 = require("./api-client");
|
||||
const environment_1 = require("./environment");
|
||||
const util_1 = require("./util");
|
||||
function getActionsStatus(error, otherFailureCause) {
|
||||
if (error || otherFailureCause) {
|
||||
return error instanceof util_1.UserError ? "user-error" : "failure";
|
||||
}
|
||||
else {
|
||||
return "success";
|
||||
}
|
||||
}
|
||||
exports.getActionsStatus = getActionsStatus;
|
||||
/**
|
||||
* Compose a StatusReport.
|
||||
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param startedAt The time this action started executing.
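* @param diskInfo Disk usage information for the runner, included in the status report.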
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReportBase(actionName, status, actionStartedAt, diskInfo, cause, exception) {
|
||||
const commitOid = (0, actions_util_1.getOptionalInput)("sha") || process.env["GITHUB_SHA"] || "";
|
||||
const ref = await (0, actions_util_1.getRef)();
|
||||
const jobRunUUID = process.env[environment_1.EnvVar.JOB_RUN_UUID] || "";
|
||||
const workflowRunID = (0, actions_util_1.getWorkflowRunID)();
|
||||
const workflowRunAttempt = (0, actions_util_1.getWorkflowRunAttempt)();
|
||||
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
|
||||
const jobName = process.env["GITHUB_JOB"] || "";
|
||||
const analysis_key = await (0, api_client_1.getAnalysisKey)();
|
||||
let workflowStartedAt = process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT];
|
||||
if (workflowStartedAt === undefined) {
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(environment_1.EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
|
||||
const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
|
||||
const actionRef = process.env["GITHUB_ACTION_REF"];
|
||||
const testingEnvironment = process.env[environment_1.EnvVar.TESTING_ENVIRONMENT] || "";
|
||||
// re-export the testing environment variable so that it is available to subsequent steps,
|
||||
// even if it was only set for this step
|
||||
if (testingEnvironment !== "") {
|
||||
core.exportVariable(environment_1.EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
|
||||
}
|
||||
const statusReport = {
|
||||
action_name: actionName,
|
||||
action_oid: "unknown",
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: (0, actions_util_1.getActionVersion)(),
|
||||
analysis_key,
|
||||
commit_oid: commitOid,
|
||||
job_name: jobName,
|
||||
job_run_uuid: jobRunUUID,
|
||||
ref,
|
||||
runner_available_disk_space_bytes: diskInfo.numAvailableBytes,
|
||||
runner_os: runnerOs,
|
||||
runner_total_disk_space_bytes: diskInfo.numTotalBytes,
|
||||
started_at: workflowStartedAt,
|
||||
status,
|
||||
testing_environment: testingEnvironment,
|
||||
workflow_name: workflowName,
|
||||
workflow_run_attempt: workflowRunAttempt,
|
||||
workflow_run_id: workflowRunID,
|
||||
};
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (status === "success" ||
|
||||
status === "failure" ||
|
||||
status === "aborted" ||
|
||||
status === "user-error") {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
const matrix = (0, actions_util_1.getRequiredInput)("matrix");
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
if ("RUNNER_ARCH" in process.env) {
|
||||
// RUNNER_ARCH is available only in GHES 3.4 and later
|
||||
// Values other than X86, X64, ARM, or ARM64 are discarded server side
|
||||
statusReport.runner_arch = process.env["RUNNER_ARCH"];
|
||||
}
|
||||
if (runnerOs === "Windows" || runnerOs === "macOS") {
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
const imageVersion = process.env["ImageVersion"];
|
||||
if (imageVersion) {
|
||||
statusReport.runner_image_version = imageVersion;
|
||||
}
|
||||
return statusReport;
|
||||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
const GENERIC_403_MSG = "The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||
const GENERIC_404_MSG = "Not authorized to use the CodeQL code scanning feature on this repo.";
|
||||
const OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||
const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Optionally checks the response from the API endpoint and sets the action
|
||||
* as failed if the status report failed. This is only expected to be used
|
||||
* when sending a 'starting' report.
|
||||
*
|
||||
* Returns whether sending the status report was successful or not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport) {
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||
// If in test mode we don't want to upload the results
|
||||
if ((0, util_1.isInTestMode)()) {
|
||||
core.debug("In test mode. Status reports are not uploaded.");
|
||||
return true;
|
||||
}
|
||||
const nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY");
|
||||
const [owner, repo] = nwo.split("/");
|
||||
const client = (0, api_client_1.getApiClient)();
|
||||
try {
|
||||
await client.request("PUT /repos/:owner/:repo/code-scanning/analysis/status", {
|
||||
owner,
|
||||
repo,
|
||||
data: statusReportJSON,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
catch (e) {
|
||||
console.log(e);
|
||||
if ((0, util_1.isHTTPError)(e)) {
|
||||
switch (e.status) {
|
||||
case 403:
|
||||
if ((0, actions_util_1.getWorkflowEventName)() === "push" &&
|
||||
process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
|
||||
core.setFailed('Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
||||
"Uploading Code Scanning results requires write access. " +
|
||||
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.");
|
||||
}
|
||||
else {
|
||||
core.setFailed(e.message || GENERIC_403_MSG);
|
||||
}
|
||||
return false;
|
||||
case 404:
|
||||
core.setFailed(GENERIC_404_MSG);
|
||||
return false;
|
||||
case 422:
|
||||
// schema incompatibility when reporting status
|
||||
// this means that this action version is no longer compatible with the API
|
||||
// we still want to continue as it is likely the analysis endpoint will work
|
||||
if ((0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL") !== util_1.GITHUB_DOTCOM_URL) {
|
||||
core.debug(INCOMPATIBLE_MSG);
|
||||
}
|
||||
else {
|
||||
core.debug(OUT_OF_DATE_MSG);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
// something else has gone wrong and the request/response will be logged by octokit
|
||||
// it's possible this is a transient error and we should continue scanning
|
||||
core.error("An unexpected error occurred when sending code scanning status report.");
|
||||
return true;
|
||||
}
|
||||
}
|
||||
exports.sendStatusReport = sendStatusReport;
|
||||
//# sourceMappingURL=status-report.js.map
|
||||
1 lib/status-report.js.map Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"status-report.js","sourceRoot":"","sources":["../src/status-report.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AAEzB,oDAAsC;AAEtC,iDAQwB;AACxB,6CAA4D;AAC5D,+CAAuC;AACvC,iCAQgB;AAqGhB,SAAgB,gBAAgB,CAC9B,KAAe,EACf,iBAA0B;IAE1B,IAAI,KAAK,IAAI,iBAAiB,EAAE;QAC9B,OAAO,KAAK,YAAY,gBAAS,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC;KAC9D;SAAM;QACL,OAAO,SAAS,CAAC;KAClB;AACH,CAAC;AATD,4CASC;AAqBD;;;;;;;;GAQG;AACI,KAAK,UAAU,sBAAsB,CAC1C,UAAsB,EACtB,MAAoB,EACpB,eAAqB,EACrB,QAAmB,EACnB,KAAc,EACd,SAAkB;IAElB,MAAM,SAAS,GAAG,IAAA,+BAAgB,EAAC,KAAK,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC;IAC7E,MAAM,GAAG,GAAG,MAAM,IAAA,qBAAM,GAAE,CAAC;IAC3B,MAAM,UAAU,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC;IAC1D,MAAM,aAAa,GAAG,IAAA,+BAAgB,GAAE,CAAC;IACzC,MAAM,kBAAkB,GAAG,IAAA,oCAAqB,GAAE,CAAC;IACnD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,IAAI,EAAE,CAAC;IAC1D,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC;IAChD,MAAM,YAAY,GAAG,MAAM,IAAA,2BAAc,GAAE,CAAC;IAC5C,IAAI,iBAAiB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,CAAC;IAChE,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,iBAAiB,GAAG,eAAe,CAAC,WAAW,EAAE,CAAC;QAClD,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;KACpE;IACD,MAAM,QAAQ,GAAG,IAAA,0BAAmB,EAAC,WAAW,CAAC,CAAC;IAClD,MAAM,gBAAgB,GAAG,IAAA,6BAAsB,GAAE,CAAC;IAClD,MAAM,SAAS,GAAG,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;IACnD,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAC;IACzE,0FAA0F;IAC1F,wCAAwC;IACxC,IAAI,kBAAkB,KAAK,EAAE,EAAE;QAC7B,IAAI,CAAC,cAAc,CAAC,oBAAM,CAAC,mBAAmB,EAAE,kBAAkB,CAAC,CAAC;KACrE;IAED,MAAM,YAAY,GAAqB;QACrC,WAAW,EAAE,UAAU;QACvB,UAAU,EAAE,SAAS;QACrB,UAAU,EAAE,SAAS;QACrB,iBAAiB,EAAE,eAAe,CAAC,WAAW,EAAE;QAChD,cAAc,EAAE,IAAA,+BAAgB,GAAE;QAClC,YAAY;QACZ,UAAU,EAAE,SAAS;QACrB,QAAQ,EAAE,OAAO;QACjB,YAAY,EAAE,UAAU;QACxB,GAAG;QACH,iCAAiC,EAAE,QAAQ,CAAC,iBAAiB;QAC7D,SAAS,EAAE,QAAQ;QACnB,6BAA6B,EAAE,QAAQ,CAAC,aAAa;QACrD,UAAU,EAAE,iBAAiB;QAC7B,MAAM;QACN,mBAAmB,EAAE,kBAAkB;QACvC,aAAa,EAAE,YAAY;QAC3B,oBAAoB,EAAE,kBAAkB;QACxC,eAAe,EAAE,aAAa;KAC/B,CAAC;IAEF,0BAA0B;IAC1B,IAAI,KAAK,EAAE;QACT,YAAY,CAAC,KAAK,GAAG,KAAK,CAAC;KAC5B;IACD,IAAI,SAAS,EAAE;QACb,YAAY,CAAC,SAAS,GAAG,SAAS,CAAC;KACpC;IACD,IACE,MAAM,KAAK,SAAS;QACpB,MAAM,KAAK,SAAS;QACpB,MAAM,KAAK,SAAS;QACpB,MAAM,KAAK,YAAY,EACvB;QACA,YAAY,CAAC,YAAY,GAAG,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;KACtD;IACD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,QAAQ,CAAC,CAAC;IAC1C,IAAI,MAAM,EAAE;QACV,YAAY,CAAC,WAAW,GAAG,MAAM,CAAC;KACnC;IACD,IAAI,aAAa,IAAI,OAAO,CAAC,GAAG,EAAE;QAChC,sDAAsD;QACtD,sEAAsE;QACtE,YAAY,CAAC,WAAW,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,CAAC;KACvD;IACD,IAAI,QAAQ,KAAK,SAAS,IAAI,QAAQ,KAAK,OAAO,EAAE;QAClD,YAAY,CAAC,iBAAiB,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC;KAC/C;IACD,IAAI,gBAAgB,KAAK,SAAS,EAAE;QAClC,YAAY,CAAC,cAAc,GAAG,gBAAgB,CAAC;KAChD;IACD,MAAM,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;IACjD,IAAI,YAAY,EAAE;QAChB,YAAY,CAAC,oBAAoB,GAAG,YAAY,CAAC;KAClD;IAED,OAAO,YAAY,CAAC;AACtB,CAAC;AAzFD,wDAyFC;AAED,MAAM,eAAe,GACnB,mFAAmF,CAAC;AACtF,MAAM,eAAe,GACnB,sEAAsE,CAAC;AACzE,MAAM,eAAe,GACnB,sFAAsF,CAAC;AACzF,MAAM,gBAAgB,GACpB,gIAAgI,CAAC;AAEnI;;;;;;;;GAQG;AACI,KAAK,UAAU,gBAAgB,CACpC,YAAe;IAEf,MAAM,gBAAgB,GAAG,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;IACtD,IAAI,CAAC,KAAK,CAAC,0BAA0B,gBAAgB,EAAE,CAAC,CAAC;IACzD,sDAAsD;IACtD,IAAI,IAAA,mBAAY,GAAE,EAAE;QAClB,IAAI,CAAC,KAAK,CAAC,gDAAgD,CAAC,CAAC;QAC7D,OAAO,IAAI,CAAC;KACb;IAED,MAAM,GAAG,GAAG,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC;IACrD,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACrC,MAAM,MAAM,GAAG,IAAA,yBAAY,GAAE,CAAC;IAE
9B,IAAI;QACF,MAAM,MAAM,CAAC,OAAO,CAClB,uDAAuD,EACvD;YACE,KAAK;YACL,IAAI;YACJ,IAAI,EAAE,gBAAgB;SACvB,CACF,CAAC;QAEF,OAAO,IAAI,CAAC;KACb;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACf,IAAI,IAAA,kBAAW,EAAC,CAAC,CAAC,EAAE;YAClB,QAAQ,CAAC,CAAC,MAAM,EAAE;gBAChB,KAAK,GAAG;oBACN,IACE,IAAA,mCAAoB,GAAE,KAAK,MAAM;wBACjC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,iBAAiB,EACjD;wBACA,IAAI,CAAC,SAAS,CACZ,mFAAmF;4BACjF,yDAAyD;4BACzD,iLAAiL;4BACjL,8JAA8J,CACjK,CAAC;qBACH;yBAAM;wBACL,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,IAAI,eAAe,CAAC,CAAC;qBAC9C;oBACD,OAAO,KAAK,CAAC;gBACf,KAAK,GAAG;oBACN,IAAI,CAAC,SAAS,CAAC,eAAe,CAAC,CAAC;oBAChC,OAAO,KAAK,CAAC;gBACf,KAAK,GAAG;oBACN,+CAA+C;oBAC/C,2EAA2E;oBAC3E,4EAA4E;oBAC5E,IAAI,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,KAAK,wBAAiB,EAAE;wBAClE,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;qBAC9B;yBAAM;wBACL,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;qBAC7B;oBACD,OAAO,IAAI,CAAC;aACf;SACF;QAED,mFAAmF;QACnF,0EAA0E;QAC1E,IAAI,CAAC,KAAK,CACR,wEAAwE,CACzE,CAAC;QACF,OAAO,IAAI,CAAC;KACb;AACH,CAAC;AApED,4CAoEC"}
|
||||
72 lib/status-report.test.js generated Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ava_1 = __importDefault(require("ava"));
|
||||
const sinon = __importStar(require("sinon"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const environment_1 = require("./environment");
|
||||
const status_report_1 = require("./status-report");
|
||||
const testing_utils_1 = require("./testing-utils");
|
||||
const util_1 = require("./util");
|
||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||
(0, ava_1.default)("createStatusReportBase", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld";
|
||||
process.env["GITHUB_RUN_ATTEMPT"] = "2";
|
||||
process.env["GITHUB_RUN_ID"] = "100";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
process.env["ImageVersion"] = "2023.05.19.1";
|
||||
process.env["RUNNER_OS"] = "macOS";
|
||||
const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
getRequiredInput.withArgs("matrix").resolves("input/matrix");
|
||||
const statusReport = await (0, status_report_1.createStatusReportBase)("init", "failure", new Date("May 19, 2023 05:19:00"), { numAvailableBytes: 100, numTotalBytes: 500 }, "failure cause", "exception stack trace");
|
||||
t.is(statusReport.action_name, "init");
|
||||
t.is(statusReport.action_oid, "unknown");
|
||||
t.is(typeof statusReport.action_version, "string");
|
||||
t.is(statusReport.action_started_at, new Date("May 19, 2023 05:19:00").toISOString());
|
||||
t.is(statusReport.analysis_key, "analysis-key");
|
||||
t.is(statusReport.cause, "failure cause");
|
||||
t.is(statusReport.commit_oid, process.env["GITHUB_SHA"]);
|
||||
t.is(statusReport.exception, "exception stack trace");
|
||||
t.is(statusReport.job_name, process.env["GITHUB_JOB"] || "");
|
||||
t.is(typeof statusReport.job_run_uuid, "string");
|
||||
t.is(statusReport.ref, process.env["GITHUB_REF"]);
|
||||
t.is(statusReport.runner_available_disk_space_bytes, 100);
|
||||
t.is(statusReport.runner_image_version, process.env["ImageVersion"]);
|
||||
t.is(statusReport.runner_os, process.env["RUNNER_OS"]);
|
||||
t.is(statusReport.started_at, process.env[environment_1.EnvVar.WORKFLOW_STARTED_AT]);
|
||||
t.is(statusReport.status, "failure");
|
||||
t.is(statusReport.workflow_name, process.env["GITHUB_WORKFLOW"] || "");
|
||||
t.is(statusReport.workflow_run_attempt, 2);
|
||||
t.is(statusReport.workflow_run_id, 100);
|
||||
});
|
||||
});
|
||||
//# sourceMappingURL=status-report.test.js.map
|
||||
1 lib/status-report.test.js.map Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"status-report.test.js","sourceRoot":"","sources":["../src/status-report.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AACvB,6CAA+B;AAE/B,4DAA8C;AAC9C,+CAAuC;AACvC,mDAAyD;AACzD,mDAA+D;AAC/D,iCAAoC;AAEpC,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB,IAAA,aAAI,EAAC,wBAAwB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACzC,MAAM,IAAA,iBAAU,EAAC,KAAK,EAAE,MAAc,EAAE,EAAE;QACxC,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,OAAO,CAAC,GAAG,CAAC,4BAA4B,CAAC,GAAG,cAAc,CAAC;QAC3D,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,iBAAiB,CAAC;QAC9C,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,oBAAoB,CAAC;QACxD,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,GAAG,GAAG,CAAC;QACxC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;QAC3C,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,GAAG,cAAc,CAAC;QAC7C,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,OAAO,CAAC;QAEnC,MAAM,gBAAgB,GAAG,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,kBAAkB,CAAC,CAAC;QACrE,gBAAgB,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC;QAE7D,MAAM,YAAY,GAAG,MAAM,IAAA,sCAAsB,EAC/C,MAAM,EACN,SAAS,EACT,IAAI,IAAI,CAAC,uBAAuB,CAAC,EACjC,EAAE,iBAAiB,EAAE,GAAG,EAAE,aAAa,EAAE,GAAG,EAAE,EAC9C,eAAe,EACf,uBAAuB,CACxB,CAAC;QAEF,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;QACvC,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,SAAS,CAAC,CAAC;QACzC,CAAC,CAAC,EAAE,CAAC,OAAO,YAAY,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC,CAAC,EAAE,CACF,YAAY,CAAC,iBAAiB,EAC9B,IAAI,IAAI,CAAC,uBAAuB,CAAC,CAAC,WAAW,EAAE,CAChD,CAAC;QACF,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,YAAY,EAAE,cAAc,CAAC,CAAC;QAChD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,KAAK,EAAE,eAAe,CAAC,CAAC;QAC1C,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QACzD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,SAAS,EAAE,uBAAuB,CAAC,CAAC;QACtD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC;QAC7D,CAAC,CAAC,EAAE,CAAC,OAAO,YAAY,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACjD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QAClD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,iCAAiC,EAAE,GAAG,CAAC,CAAC;QAC1D,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,oBAAoB,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;QACrE,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,SAAS,EAAE,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC;QACvD,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,GAAG,CAAC,oBAAM,CAAC,mBAAmB,CAAE,CAAC,CAAC;QACxE,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;QACrC,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,aAAa,EAAE,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,IAAI,EAAE,CAAC,CAAC;QACvE,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,oBAAoB,EAAE,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,eAAe,EAAE,GAAG,CAAC,CAAC;IAC1C,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
15 lib/upload-sarif-action.js generated
@@ -26,34 +26,35 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const api_client_1 = require("./api-client");
|
||||
const logging_1 = require("./logging");
|
||||
const repository_1 = require("./repository");
|
||||
const status_report_1 = require("./status-report");
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util_1 = require("./util");
|
||||
async function sendSuccessStatusReport(startedAt, uploadStats) {
|
||||
const statusReportBase = await (0, api_client_1.createStatusReportBase)("upload-sarif", "success", startedAt);
|
||||
const statusReportBase = await (0, status_report_1.createStatusReportBase)("upload-sarif", "success", startedAt, await (0, util_1.checkDiskUsage)());
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...uploadStats,
|
||||
};
|
||||
await (0, api_client_1.sendStatusReport)(statusReport);
|
||||
await (0, status_report_1.sendStatusReport)(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
(0, util_1.initializeEnvironment)((0, actions_util_1.getActionVersion)());
|
||||
if (!(await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("upload-sarif", "starting", startedAt)))) {
|
||||
if (!(await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("upload-sarif", "starting", startedAt, await (0, util_1.checkDiskUsage)())))) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), (0, logging_1.getActionsLogger)());
|
||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
if ((0, util_1.isInTestMode)()) {
|
||||
core.debug("In test mode. Waiting for processing is disabled.");
|
||||
}
|
||||
else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, (0, logging_1.getActionsLogger)());
|
||||
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, logger);
|
||||
}
|
||||
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
|
||||
}
|
||||
@@ -62,7 +63,7 @@ async function run() {
|
||||
const message = error.message;
|
||||
core.setFailed(message);
|
||||
console.log(error);
|
||||
await (0, api_client_1.sendStatusReport)(await (0, api_client_1.createStatusReportBase)("upload-sarif", actionsUtil.getActionsStatus(error), startedAt, message, error.stack));
|
||||
await (0, status_report_1.sendStatusReport)(await (0, status_report_1.createStatusReportBase)("upload-sarif", (0, status_report_1.getActionsStatus)(error), startedAt, await (0, util_1.checkDiskUsage)(), message, error.stack));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAkD;AAClD,6CAAwE;AACxE,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,IAAA,mCAAsB,EACnD,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,IAAA,6BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAC1C,IACE,CAAC,CAAC,MAAM,IAAA,6BAAgB,EACtB,MAAM,IAAA,mCAAsB,EAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,CACpE,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAA,6BAAgB,EACpB,MAAM,IAAA,mCAAsB,EAC1B,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACjE,CAAC;KACH;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAAkD;AAClD,uCAA6C;AAC7C,6CAAkD;AAClD,mDAKyB;AACzB,yDAA2C;AAC3C,iCAMgB;AAMhB,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,cAAc,EACd,SAAS,EACT,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,CACvB,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,IAAA,gCAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAC1C,IACE,CAAC,CAAC,MAAM,IAAA,gCAAgB,EACtB,MAAM,IAAA,sCAAsB,EAC1B,cAAc,EACd,UAAU,EACV,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,CACvB,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,MAAM,CACP,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,MAAM,CACP,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,cAAc,EAAE;QACvB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QACxC,MAAM,OAAO,GAAG,KAAK,CAAC,OAAO,CAAC;QAC9B,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,IAAA,gCAAgB,EACpB,MAAM,IAAA,sCAAsB,EAC1B,cAAc,EACd,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,IAAA,qBAAc,GAAE,EACtB,OAAO,EACP,KAAK,CAAC,KAAK,CACZ,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,sCAAsC,IAAA,gBAAS,EAAC,KAAK,CAAC,CAAC,OAAO,EAAE,CACjE,CAAC;KACH;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||
23 lib/util.js generated
@@ -26,12 +26,13 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.prettyPrintPack = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.wrapError = exports.fixInvalidNotificationsInFile = exports.fixInvalidNotifications = exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.isInTestMode = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.initializeEnvironment = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.getMemoryFlagValueForPlatform = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
exports.checkDiskUsage = exports.prettyPrintPack = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.wrapError = exports.fixInvalidNotificationsInFile = exports.fixInvalidNotifications = exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.isInTestMode = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.initializeEnvironment = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.getMemoryFlagValueForPlatform = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const util_1 = require("util");
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const check_disk_space_1 = __importDefault(require("check-disk-space"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const get_folder_size_1 = __importDefault(require("get-folder-size"));
|
||||
const semver = __importStar(require("semver"));
|
||||
@@ -682,4 +683,24 @@ function prettyPrintPack(pack) {
|
||||
return `${pack.name}${pack.version ? `@${pack.version}` : ""}${pack.path ? `:${pack.path}` : ""}`;
|
||||
}
|
||||
exports.prettyPrintPack = prettyPrintPack;
|
||||
async function checkDiskUsage(logger) {
|
||||
const diskUsage = await (0, check_disk_space_1.default)(getRequiredEnvParam("GITHUB_WORKSPACE"));
|
||||
const gbInBytes = 1024 * 1024 * 1024;
|
||||
if (logger && diskUsage.free < 2 * gbInBytes) {
|
||||
const message = "The Actions runner is running low on disk space " +
|
||||
`(${(diskUsage.free / gbInBytes).toPrecision(4)} GB available).`;
|
||||
if (process.env[environment_1.EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
|
||||
logger.warning(message);
|
||||
}
|
||||
else {
|
||||
logger.debug(message);
|
||||
}
|
||||
core.exportVariable(environment_1.EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
|
||||
}
|
||||
return {
|
||||
numAvailableBytes: diskUsage.free,
|
||||
numTotalBytes: diskUsage.size,
|
||||
};
|
||||
}
|
||||
exports.checkDiskUsage = checkDiskUsage;
|
||||
//# sourceMappingURL=util.js.map
|
||||
File diff suppressed because one or more lines are too long
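The checkDiskUsage helper added to util.js wraps the new check-disk-space dependency, measures the volume hosting GITHUB_WORKSPACE, and warns at most once per job (tracked via an exported environment variable) when free space drops below roughly 2 GB. A short usage sketch follows; the byte counts in the comments are invented.

```ts
// Usage sketch only; the byte counts in the comments are invented.
import { getActionsLogger } from "./logging";
import { checkDiskUsage } from "./util";

async function logDiskUsageExample(): Promise<void> {
  const logger = getActionsLogger();
  // Measures the volume hosting GITHUB_WORKSPACE via the check-disk-space package.
  const usage = await checkDiskUsage(logger);
  // e.g. { numAvailableBytes: 1500000000, numTotalBytes: 84000000000 }
  //
  // When less than 2 GB is free, the first call in a job logs a warning such as
  //   "The Actions runner is running low on disk space (1.397 GB available)."
  // and exports EnvVar.HAS_WARNED_ABOUT_DISK_SPACE so that later calls in the same
  // job only log at debug level instead of repeating the warning.
  logger.info(
    `${usage.numAvailableBytes} of ${usage.numTotalBytes} bytes available on the runner.`,
  );
}
```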
10 node_modules/.package-lock.json generated vendored
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.21.3",
|
||||
"version": "2.21.4",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
@@ -1751,6 +1751,14 @@
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/check-disk-space": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/check-disk-space/-/check-disk-space-3.4.0.tgz",
|
||||
"integrity": "sha512-drVkSqfwA+TvuEhFipiR1OC9boEGZL5RrWvVsOthdcvQNXyCCuKkEiTOTXZ7qxSf/GLwq4GvzfrQD/Wz325hgw==",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.5.3",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
|
||||
|
||||
21 node_modules/check-disk-space/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2017-2019 Alexandre Demode
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
53 node_modules/check-disk-space/README.md generated vendored Normal file
@@ -0,0 +1,53 @@
|
||||
# Check disk space
|
||||
|
||||
[](https://github.com/Alex-D/check-disk-space/actions/workflows/ci.yml)
|
||||
[](https://www.npmjs.com/package/check-disk-space)
|
||||
[](LICENSE)
|
||||
|
||||
|
||||
## Introduction
|
||||
|
||||
Light multi-platform disk space checker without third party for Node.js.
|
||||
|
||||
- Works on Linux, macOS and Windows
|
||||
- Take care of mounting points on unix-like systems
|
||||
- No dependencies
|
||||
- TypeScript support
|
||||
|
||||
|
||||
## Install
|
||||
|
||||
`npm install check-disk-space`
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
// ES
|
||||
import checkDiskSpace from 'check-disk-space'
|
||||
|
||||
// CommonJS
|
||||
const checkDiskSpace = require('check-disk-space').default
|
||||
|
||||
// On Windows
|
||||
checkDiskSpace('C:/blabla/bla').then((diskSpace) => {
|
||||
console.log(diskSpace)
|
||||
// {
|
||||
// diskPath: 'C:',
|
||||
// free: 12345678,
|
||||
// size: 98756432
|
||||
// }
|
||||
// Note: `free` and `size` are in bytes
|
||||
})
|
||||
|
||||
// On Linux or macOS
|
||||
checkDiskSpace('/mnt/mygames').then((diskSpace) => {
|
||||
console.log(diskSpace)
|
||||
// {
|
||||
// diskPath: '/',
|
||||
// free: 12345678,
|
||||
// size: 98756432
|
||||
// }
|
||||
// Note: `free` and `size` are in bytes
|
||||
})
|
||||
```
|
||||
210 node_modules/check-disk-space/dist/check-disk-space.cjs generated vendored Normal file
@@ -0,0 +1,210 @@
|
||||
'use strict';
|
||||
|
||||
Object.defineProperty(exports, '__esModule', { value: true });
|
||||
|
||||
var node_child_process = require('node:child_process');
|
||||
var promises = require('node:fs/promises');
|
||||
var node_os = require('node:os');
|
||||
var node_path = require('node:path');
|
||||
var node_process = require('node:process');
|
||||
var node_util = require('node:util');
|
||||
|
||||
class InvalidPathError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'InvalidPathError';
|
||||
// Set the prototype explicitly.
|
||||
Object.setPrototypeOf(this, InvalidPathError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
class NoMatchError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'NoMatchError';
|
||||
// Set the prototype explicitly.
|
||||
Object.setPrototypeOf(this, NoMatchError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells if directory exists
|
||||
*
|
||||
* @param directoryPath - The file/folder path
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
async function isDirectoryExisting(directoryPath, dependencies) {
|
||||
try {
|
||||
await dependencies.fsAccess(directoryPath);
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
catch (error) {
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first existing parent path
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
async function getFirstExistingParentPath(directoryPath, dependencies) {
|
||||
let parentDirectoryPath = directoryPath;
|
||||
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
|
||||
while (!parentDirectoryFound) {
|
||||
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + '/..');
|
||||
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
|
||||
}
|
||||
return parentDirectoryPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tell if PowerShell 3 is available based on Windows version
|
||||
*
|
||||
* Note: 6.* is Windows 7
|
||||
* Note: PowerShell 3 is natively available since Windows 8
|
||||
*
|
||||
* @param dependencies - Dependency injection container
|
||||
*/
|
||||
async function hasPowerShell3(dependencies) {
|
||||
const major = parseInt(dependencies.release.split('.')[0], 10);
|
||||
if (major <= 6) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
await dependencies.cpExecFile('where', ['powershell'], { windowsHide: true });
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check disk space
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
function checkDiskSpace(directoryPath, dependencies = {
|
||||
platform: node_process.platform,
|
||||
release: node_os.release(),
|
||||
fsAccess: promises.access,
|
||||
pathNormalize: node_path.normalize,
|
||||
pathSep: node_path.sep,
|
||||
cpExecFile: node_util.promisify(node_child_process.execFile),
|
||||
}) {
|
||||
// Note: This function contains other functions in order
|
||||
// to wrap them in a common context and make unit tests easier
|
||||
/**
|
||||
* Maps command output to a normalized object {diskPath, free, size}
|
||||
*
|
||||
* @param stdout - The command output
|
||||
* @param filter - To filter drives (only used for win32)
|
||||
* @param mapping - Map between column index and normalized column name
|
||||
* @param coefficient - The size coefficient to get bytes instead of kB
|
||||
*/
|
||||
function mapOutput(stdout, filter, mapping, coefficient) {
|
||||
const parsed = stdout
|
||||
.split('\n') // Split lines
|
||||
.map(line => line.trim()) // Trim all lines
|
||||
.filter(line => line.length !== 0) // Remove empty lines
|
||||
.slice(1) // Remove header
|
||||
.map(line => line.split(/\s+(?=[\d/])/)); // Split on spaces to get columns
|
||||
const filtered = parsed.filter(filter);
|
||||
if (filtered.length === 0) {
|
||||
throw new NoMatchError();
|
||||
}
|
||||
const diskData = filtered[0];
|
||||
return {
|
||||
diskPath: diskData[mapping.diskPath],
|
||||
free: parseInt(diskData[mapping.free], 10) * coefficient,
|
||||
size: parseInt(diskData[mapping.size], 10) * coefficient,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Run the command and do common things between win32 and unix
|
||||
*
|
||||
* @param cmd - The command to execute
|
||||
* @param filter - To filter drives (only used for win32)
|
||||
* @param mapping - Map between column index and normalized column name
|
||||
* @param coefficient - The size coefficient to get bytes instead of kB
|
||||
*/
|
||||
async function check(cmd, filter, mapping, coefficient = 1) {
|
||||
const [file, ...args] = cmd;
|
||||
/* istanbul ignore if */
|
||||
if (file === undefined) {
|
||||
return Promise.reject(new Error('cmd must contain at least one item'));
|
||||
}
|
||||
try {
|
||||
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
|
||||
return mapOutput(stdout, filter, mapping, coefficient);
|
||||
}
|
||||
catch (error) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Build the check call for win32
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
*/
|
||||
async function checkWin32(directoryPath) {
|
||||
if (directoryPath.charAt(1) !== ':') {
|
||||
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath}`));
|
||||
}
|
||||
const powershellCmd = [
|
||||
'powershell',
|
||||
'Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size',
|
||||
];
|
||||
const wmicCmd = [
|
||||
'wmic',
|
||||
'logicaldisk',
|
||||
'get',
|
||||
'size,freespace,caption',
|
||||
];
|
||||
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
|
||||
return check(cmd, driveData => {
|
||||
// Only get the drive which matches the path
|
||||
const driveLetter = driveData[0];
|
||||
return directoryPath.toUpperCase().startsWith(driveLetter.toUpperCase());
|
||||
}, {
|
||||
diskPath: 0,
|
||||
free: 1,
|
||||
size: 2,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Build the check call for unix
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
*/
|
||||
async function checkUnix(directoryPath) {
|
||||
if (!dependencies.pathNormalize(directoryPath).startsWith(dependencies.pathSep)) {
|
||||
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath}`));
|
||||
}
|
||||
const pathToCheck = await getFirstExistingParentPath(directoryPath, dependencies);
|
||||
return check([
|
||||
'df',
|
||||
'-Pk',
|
||||
'--',
|
||||
pathToCheck,
|
||||
], () => true, // We should only get one line, so we do not need to filter
|
||||
{
|
||||
diskPath: 5,
|
||||
free: 3,
|
||||
size: 1,
|
||||
}, 1024);
|
||||
}
|
||||
// Call the right check depending on the OS
|
||||
if (dependencies.platform === 'win32') {
|
||||
return checkWin32(directoryPath);
|
||||
}
|
||||
return checkUnix(directoryPath);
|
||||
}
|
||||
|
||||
exports.InvalidPathError = InvalidPathError;
|
||||
exports.NoMatchError = NoMatchError;
|
||||
exports.default = checkDiskSpace;
|
||||
exports.getFirstExistingParentPath = getFirstExistingParentPath;
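To make the unix branch concrete, the sketch below replays `mapOutput` by hand on an illustrative `df -Pk` line; the sample numbers are made up, but the split regex and the `{ diskPath: 5, free: 3, size: 1 }` mapping with the `1024` coefficient match the code above.

```ts
// Illustrative `df -Pk` output; real values differ per machine.
const stdout = [
  "Filesystem     1024-blocks      Used Available Capacity Mounted on",
  "/dev/sda1         98756432  86410754  12345678      88% /",
].join("\n");

// Same split the library uses: whitespace followed by a digit or '/'.
const columns = stdout
  .split("\n")
  .map((line) => line.trim())
  .filter((line) => line.length !== 0)
  .slice(1) // drop the header row
  .map((line) => line.split(/\s+(?=[\d/])/))[0];
// columns = ["/dev/sda1", "98756432", "86410754", "12345678", "88%", "/"]

// With mapping { diskPath: 5, free: 3, size: 1 } and coefficient 1024:
const diskSpace = {
  diskPath: columns[5],                  // "/"
  free: parseInt(columns[3], 10) * 1024, // available KiB -> bytes
  size: parseInt(columns[1], 10) * 1024, // total KiB -> bytes
};
console.log(diskSpace);
```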
53 node_modules/check-disk-space/dist/check-disk-space.d.ts generated vendored Normal file
@@ -0,0 +1,53 @@
|
||||
import { access } from 'node:fs/promises';
|
||||
import { normalize, sep } from 'node:path';
|
||||
|
||||
declare class InvalidPathError extends Error {
|
||||
name: string;
|
||||
constructor(message?: string);
|
||||
}
|
||||
|
||||
declare class NoMatchError extends Error {
|
||||
name: string;
|
||||
constructor(message?: string);
|
||||
}
|
||||
|
||||
type Dependencies = {
|
||||
platform: NodeJS.Platform;
|
||||
release: string;
|
||||
fsAccess: typeof access;
|
||||
pathNormalize: typeof normalize;
|
||||
pathSep: typeof sep;
|
||||
cpExecFile: (file: string, args: ReadonlyArray<string> | undefined | null, options: {
|
||||
windowsHide: true;
|
||||
}) => Promise<{
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the first existing parent path
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
declare function getFirstExistingParentPath(directoryPath: string, dependencies: Dependencies): Promise<string>;
|
||||
|
||||
/**
|
||||
* `free` and `size` are in bytes
|
||||
*/
|
||||
type DiskSpace = {
|
||||
diskPath: string;
|
||||
free: number;
|
||||
size: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* Check disk space
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
declare function checkDiskSpace(directoryPath: string, dependencies?: Dependencies): Promise<DiskSpace>;
|
||||
|
||||
export { Dependencies, DiskSpace, InvalidPathError, NoMatchError, checkDiskSpace as default, getFirstExistingParentPath };
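Because `checkDiskSpace` accepts an optional dependencies container, a test can swap in fakes instead of shelling out. The sketch below assumes a POSIX-style host; the stubbed `df` output and resulting numbers are illustrative only.

```ts
import checkDiskSpace, { Dependencies } from "check-disk-space";
import { normalize, sep } from "node:path";

// Fake dependencies so the check never runs a real command.
const fakeDependencies: Dependencies = {
  platform: "linux",
  release: "5.15.0",
  fsAccess: async () => undefined, // pretend every path exists
  pathNormalize: normalize,
  pathSep: sep, // assumes a POSIX host where sep is "/"
  cpExecFile: async () => ({
    stdout: [
      "Filesystem 1024-blocks Used Available Capacity Mounted on",
      "/dev/sda1 98756432 86410754 12345678 88% /",
    ].join("\n"),
    stderr: "",
  }),
};

checkDiskSpace("/mnt/mygames", fakeDependencies).then((diskSpace) => {
  // { diskPath: '/', free: 12641974272, size: 101126586368 } — bytes
  console.log(diskSpace);
});
```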
203 node_modules/check-disk-space/dist/check-disk-space.mjs generated vendored Normal file
@@ -0,0 +1,203 @@
|
||||
import { execFile } from 'node:child_process';
|
||||
import { access } from 'node:fs/promises';
|
||||
import { release } from 'node:os';
|
||||
import { normalize, sep } from 'node:path';
|
||||
import { platform } from 'node:process';
|
||||
import { promisify } from 'node:util';
|
||||
|
||||
class InvalidPathError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'InvalidPathError';
|
||||
// Set the prototype explicitly.
|
||||
Object.setPrototypeOf(this, InvalidPathError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
class NoMatchError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
this.name = 'NoMatchError';
|
||||
// Set the prototype explicitly.
|
||||
Object.setPrototypeOf(this, NoMatchError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tells if directory exists
|
||||
*
|
||||
* @param directoryPath - The file/folder path
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
async function isDirectoryExisting(directoryPath, dependencies) {
|
||||
try {
|
||||
await dependencies.fsAccess(directoryPath);
|
||||
return Promise.resolve(true);
|
||||
}
|
||||
catch (error) {
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first existing parent path
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
async function getFirstExistingParentPath(directoryPath, dependencies) {
|
||||
let parentDirectoryPath = directoryPath;
|
||||
let parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
|
||||
while (!parentDirectoryFound) {
|
||||
parentDirectoryPath = dependencies.pathNormalize(parentDirectoryPath + '/..');
|
||||
parentDirectoryFound = await isDirectoryExisting(parentDirectoryPath, dependencies);
|
||||
}
|
||||
return parentDirectoryPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tell if PowerShell 3 is available based on Windows version
|
||||
*
|
||||
* Note: 6.* is Windows 7
|
||||
* Note: PowerShell 3 is natively available since Windows 8
|
||||
*
|
||||
* @param dependencies - Dependency injection container
|
||||
*/
|
||||
async function hasPowerShell3(dependencies) {
|
||||
const major = parseInt(dependencies.release.split('.')[0], 10);
|
||||
if (major <= 6) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
await dependencies.cpExecFile('where', ['powershell'], { windowsHide: true });
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check disk space
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
* @param dependencies - Dependencies container
|
||||
*/
|
||||
function checkDiskSpace(directoryPath, dependencies = {
|
||||
platform,
|
||||
release: release(),
|
||||
fsAccess: access,
|
||||
pathNormalize: normalize,
|
||||
pathSep: sep,
|
||||
cpExecFile: promisify(execFile),
|
||||
}) {
|
||||
// Note: This function contains other functions in order
|
||||
// to wrap them in a common context and make unit tests easier
|
||||
/**
|
||||
* Maps command output to a normalized object {diskPath, free, size}
|
||||
*
|
||||
* @param stdout - The command output
|
||||
* @param filter - To filter drives (only used for win32)
|
||||
* @param mapping - Map between column index and normalized column name
|
||||
* @param coefficient - The size coefficient to get bytes instead of kB
|
||||
*/
|
||||
function mapOutput(stdout, filter, mapping, coefficient) {
|
||||
const parsed = stdout
|
||||
.split('\n') // Split lines
|
||||
.map(line => line.trim()) // Trim all lines
|
||||
.filter(line => line.length !== 0) // Remove empty lines
|
||||
.slice(1) // Remove header
|
||||
.map(line => line.split(/\s+(?=[\d/])/)); // Split on spaces to get columns
|
||||
const filtered = parsed.filter(filter);
|
||||
if (filtered.length === 0) {
|
||||
throw new NoMatchError();
|
||||
}
|
||||
const diskData = filtered[0];
|
||||
return {
|
||||
diskPath: diskData[mapping.diskPath],
|
||||
free: parseInt(diskData[mapping.free], 10) * coefficient,
|
||||
size: parseInt(diskData[mapping.size], 10) * coefficient,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Run the command and do common things between win32 and unix
|
||||
*
|
||||
* @param cmd - The command to execute
|
||||
* @param filter - To filter drives (only used for win32)
|
||||
* @param mapping - Map between column index and normalized column name
|
||||
* @param coefficient - The size coefficient to get bytes instead of kB
|
||||
*/
|
||||
async function check(cmd, filter, mapping, coefficient = 1) {
|
||||
const [file, ...args] = cmd;
|
||||
/* istanbul ignore if */
|
||||
if (file === undefined) {
|
||||
return Promise.reject(new Error('cmd must contain at least one item'));
|
||||
}
|
||||
try {
|
||||
const { stdout } = await dependencies.cpExecFile(file, args, { windowsHide: true });
|
||||
return mapOutput(stdout, filter, mapping, coefficient);
|
||||
}
|
||||
catch (error) {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Build the check call for win32
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
*/
|
||||
async function checkWin32(directoryPath) {
|
||||
if (directoryPath.charAt(1) !== ':') {
|
||||
return Promise.reject(new InvalidPathError(`The following path is invalid (should be X:\\...): ${directoryPath}`));
|
||||
}
|
||||
const powershellCmd = [
|
||||
'powershell',
|
||||
'Get-CimInstance -ClassName Win32_LogicalDisk | Select-Object Caption, FreeSpace, Size',
|
||||
];
|
||||
const wmicCmd = [
|
||||
'wmic',
|
||||
'logicaldisk',
|
||||
'get',
|
||||
'size,freespace,caption',
|
||||
];
|
||||
const cmd = await hasPowerShell3(dependencies) ? powershellCmd : wmicCmd;
|
||||
return check(cmd, driveData => {
|
||||
// Only get the drive which matches the path
|
||||
const driveLetter = driveData[0];
|
||||
return directoryPath.toUpperCase().startsWith(driveLetter.toUpperCase());
|
||||
}, {
|
||||
diskPath: 0,
|
||||
free: 1,
|
||||
size: 2,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Build the check call for unix
|
||||
*
|
||||
* @param directoryPath - The file/folder path from where we want to know disk space
|
||||
*/
|
||||
async function checkUnix(directoryPath) {
|
||||
if (!dependencies.pathNormalize(directoryPath).startsWith(dependencies.pathSep)) {
|
||||
return Promise.reject(new InvalidPathError(`The following path is invalid (should start by ${dependencies.pathSep}): ${directoryPath}`));
|
||||
}
|
||||
const pathToCheck = await getFirstExistingParentPath(directoryPath, dependencies);
|
||||
return check([
|
||||
'df',
|
||||
'-Pk',
|
||||
'--',
|
||||
pathToCheck,
|
||||
], () => true, // We should only get one line, so we do not need to filter
|
||||
{
|
||||
diskPath: 5,
|
||||
free: 3,
|
||||
size: 1,
|
||||
}, 1024);
|
||||
}
|
||||
// Call the right check depending on the OS
|
||||
if (dependencies.platform === 'win32') {
|
||||
return checkWin32(directoryPath);
|
||||
}
|
||||
return checkUnix(directoryPath);
|
||||
}
|
||||
|
||||
export { InvalidPathError, NoMatchError, checkDiskSpace as default, getFirstExistingParentPath };
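The exported `getFirstExistingParentPath` helper can also be called directly. A minimal sketch, assuming only the two dependencies it actually consults are needed and casting to satisfy the full container type:

```ts
import { Dependencies, getFirstExistingParentPath } from "check-disk-space";
import { access } from "node:fs/promises";
import { normalize } from "node:path";

// Only fsAccess and pathNormalize are used by this helper (see the
// implementation above); the cast just satisfies the container type.
const dependencies = {
  fsAccess: access,
  pathNormalize: normalize,
} as Dependencies;

// Walks upwards until an existing directory is found, e.g.
// '/mnt/mygames/not/created/yet' -> '/mnt/mygames', or '/' if nothing below exists.
getFirstExistingParentPath("/mnt/mygames/not/created/yet", dependencies).then(
  (existingParent) => console.log(existingParent),
);
```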
83 node_modules/check-disk-space/package.json generated vendored Normal file
@@ -0,0 +1,83 @@
|
||||
{
|
||||
"name": "check-disk-space",
|
||||
"version": "3.4.0",
|
||||
"description": "Light multi-platform disk space checker without third party for Node.js",
|
||||
"files": [
|
||||
"dist",
|
||||
"README.md"
|
||||
],
|
||||
"main": "./dist/check-disk-space.cjs",
|
||||
"module": "./dist/check-disk-space.mjs",
|
||||
"types": "./dist/check-disk-space.d.ts",
|
||||
"exports": {
|
||||
"import": "./dist/check-disk-space.mjs",
|
||||
"require": "./dist/check-disk-space.cjs",
|
||||
"types": "./dist/check-disk-space.d.ts"
|
||||
},
|
||||
"scripts": {
|
||||
"build:lib": "rollup --config",
|
||||
"build:dts": "rollup --config rollup.dts.config.mjs",
|
||||
"build": "npm-run-all build:lib build:dts",
|
||||
"lint": "eslint . --cache",
|
||||
"lint:fix": "eslint --fix .",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "npm-run-all test:coverage lint typecheck",
|
||||
"test:unit": "NODE_ENV=test TS_NODE_PROJECT='tsconfig.test.json' ava",
|
||||
"test:coverage": "nyc --reporter=lcov --reporter=text npm run test:unit --silent"
|
||||
},
|
||||
"ava": {
|
||||
"files": [
|
||||
"test/**",
|
||||
"!test/__helpers__/**"
|
||||
],
|
||||
"extensions": [
|
||||
"ts"
|
||||
],
|
||||
"require": [
|
||||
"tsconfig-paths/register",
|
||||
"ts-node/register",
|
||||
"source-map-support/register"
|
||||
]
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@alex-d/eslint-config": "2.2.0",
|
||||
"@istanbuljs/nyc-config-typescript": "1.0.2",
|
||||
"@rollup/plugin-typescript": "11.1.1",
|
||||
"@types/node": "16.11.7",
|
||||
"@typescript-eslint/eslint-plugin": "5.59.6",
|
||||
"@typescript-eslint/parser": "5.59.6",
|
||||
"ava": "5.2.0",
|
||||
"eslint": "8.41.0",
|
||||
"eslint-plugin-ava": "14.0.0",
|
||||
"eslint-plugin-import": "2.27.5",
|
||||
"eslint-plugin-simple-import-sort": "10.0.0",
|
||||
"npm-run-all": "4.1.5",
|
||||
"nyc": "15.1.0",
|
||||
"rollup": "3.22.0",
|
||||
"rollup-plugin-dts": "5.3.0",
|
||||
"source-map-support": "0.5.21",
|
||||
"ts-node": "10.9.1",
|
||||
"tsconfig-paths": "4.2.0",
|
||||
"tslib": "2.5.2",
|
||||
"typescript": "5.0.4"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/Alex-D/check-disk-space.git"
|
||||
},
|
||||
"keywords": [
|
||||
"disk",
|
||||
"space",
|
||||
"hdd",
|
||||
"free"
|
||||
],
|
||||
"author": "Alex-D <contact@alex-d.fr> (https://alex-d.fr)",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/Alex-D/check-disk-space/issues"
|
||||
},
|
||||
"homepage": "https://github.com/Alex-D/check-disk-space#readme"
|
||||
}
13 package-lock.json generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.21.3",
|
||||
"version": "2.21.4",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "codeql",
|
||||
"version": "2.21.3",
|
||||
"version": "2.21.4",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "^1.1.1",
|
||||
@@ -22,6 +22,7 @@
|
||||
"@schemastore/package": "0.0.10",
|
||||
"@types/uuid": "^9.0.2",
|
||||
"adm-zip": "^0.5.10",
|
||||
"check-disk-space": "^3.4.0",
|
||||
"console-log-level": "^1.4.1",
|
||||
"del": "^6.1.1",
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
@@ -1807,6 +1808,14 @@
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/check-disk-space": {
|
||||
"version": "3.4.0",
|
||||
"resolved": "https://registry.npmjs.org/check-disk-space/-/check-disk-space-3.4.0.tgz",
|
||||
"integrity": "sha512-drVkSqfwA+TvuEhFipiR1OC9boEGZL5RrWvVsOthdcvQNXyCCuKkEiTOTXZ7qxSf/GLwq4GvzfrQD/Wz325hgw==",
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.5.3",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.21.3",
|
||||
"version": "2.21.4",
|
||||
"private": true,
|
||||
"description": "CodeQL action",
|
||||
"scripts": {
|
||||
@@ -34,6 +34,7 @@
|
||||
"@schemastore/package": "0.0.10",
|
||||
"@types/uuid": "^9.0.2",
|
||||
"adm-zip": "^0.5.10",
|
||||
"check-disk-space": "^3.4.0",
|
||||
"console-log-level": "^1.4.1",
|
||||
"del": "^6.1.1",
|
||||
"fast-deep-equal": "^3.1.3",
|
||||
|
||||
@@ -22,8 +22,8 @@ predicate isSafeForDefaultSetup(string envVar) {
|
||||
"GITHUB_ACTION_REF", "GITHUB_ACTION_REPOSITORY", "GITHUB_ACTOR", "GITHUB_API_URL",
|
||||
"GITHUB_BASE_REF", "GITHUB_EVENT_NAME", "GITHUB_JOB", "GITHUB_RUN_ATTEMPT", "GITHUB_RUN_ID",
|
||||
"GITHUB_SHA", "GITHUB_REPOSITORY", "GITHUB_SERVER_URL", "GITHUB_TOKEN", "GITHUB_WORKFLOW",
|
||||
"GITHUB_WORKSPACE", "GOFLAGS", "JAVA_TOOL_OPTIONS", "RUNNER_ARCH", "RUNNER_NAME", "RUNNER_OS",
|
||||
"RUNNER_TEMP", "RUNNER_TOOL_CACHE"
|
||||
"GITHUB_WORKSPACE", "GOFLAGS", "ImageVersion", "JAVA_TOOL_OPTIONS", "RUNNER_ARCH",
|
||||
"RUNNER_NAME", "RUNNER_OS", "RUNNER_TEMP", "RUNNER_TOOL_CACHE"
|
||||
]
|
||||
}
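For context, the TypeScript below illustrates the kind of `process.env` read this query reasons about; `MY_CUSTOM_FLAG` is a hypothetical name used only to show a variable outside the allow-list above.

```ts
// Allowed: GITHUB_WORKSPACE appears in the isSafeForDefaultSetup allow-list.
const workspace = process.env["GITHUB_WORKSPACE"];

// Selected by the query: a hypothetical variable that is not in the allow-list
// (and not read from a *.test.ts file) would be reported.
const customFlag = process.env["MY_CUSTOM_FLAG"];

console.log(workspace, customFlag);
```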
|
||||
|
||||
@@ -43,6 +43,7 @@ predicate envVarRead(DataFlow::Node node, string envVar) {
|
||||
from DataFlow::Node read, string envVar
|
||||
where
|
||||
envVarRead(read, envVar) and
|
||||
not read.getFile().getBaseName().matches("%.test.ts") and
|
||||
not isSafeForDefaultSetup(envVar)
|
||||
select read,
|
||||
"The environment variable " + envVar +
|
||||
|
||||
@@ -5,7 +5,7 @@ import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { computeAutomationID, createStatusReportBase } from "./api-client";
|
||||
import { computeAutomationID } from "./api-client";
|
||||
import { EnvVar } from "./environment";
|
||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||
import { initializeEnvironment, withTmpDir } from "./util";
|
||||
@@ -267,53 +267,3 @@ test("isAnalyzingDefaultBranch()", async (t) => {
|
||||
getAdditionalInputStub.restore();
|
||||
});
|
||||
});
|
||||
|
||||
test("createStatusReportBase", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
process.env["GITHUB_REF"] = "refs/heads/main";
|
||||
process.env["GITHUB_SHA"] = "a".repeat(40);
|
||||
process.env["GITHUB_RUN_ID"] = "100";
|
||||
process.env["GITHUB_RUN_ATTEMPT"] = "2";
|
||||
process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld";
|
||||
process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
|
||||
process.env["RUNNER_OS"] = "macOS";
|
||||
|
||||
const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
getRequiredInput.withArgs("matrix").resolves("input/matrix");
|
||||
|
||||
const statusReport = await createStatusReportBase(
|
||||
"init",
|
||||
"failure",
|
||||
new Date("May 19, 2023 05:19:00"),
|
||||
"failure cause",
|
||||
"exception stack trace",
|
||||
);
|
||||
|
||||
t.assert(typeof statusReport.job_run_uuid === "string");
|
||||
t.assert(statusReport.workflow_run_id === 100);
|
||||
t.assert(statusReport.workflow_run_attempt === 2);
|
||||
t.assert(
|
||||
statusReport.workflow_name === (process.env["GITHUB_WORKFLOW"] || ""),
|
||||
);
|
||||
t.assert(statusReport.job_name === (process.env["GITHUB_JOB"] || ""));
|
||||
t.assert(statusReport.analysis_key === "analysis-key");
|
||||
t.assert(statusReport.commit_oid === process.env["GITHUB_SHA"]);
|
||||
t.assert(statusReport.ref === process.env["GITHUB_REF"]);
|
||||
t.assert(statusReport.action_name === "init");
|
||||
t.assert(statusReport.action_oid === "unknown");
|
||||
t.assert(
|
||||
statusReport.started_at === process.env[EnvVar.WORKFLOW_STARTED_AT],
|
||||
);
|
||||
t.assert(
|
||||
statusReport.action_started_at ===
|
||||
new Date("May 19, 2023 05:19:00").toISOString(),
|
||||
);
|
||||
t.assert(statusReport.status === "failure");
|
||||
t.assert(statusReport.cause === "failure cause");
|
||||
t.assert(statusReport.exception === "exception stack trace");
|
||||
t.assert(statusReport.runner_os === process.env["RUNNER_OS"]);
|
||||
t.assert(typeof statusReport.action_version === "string");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -239,126 +239,6 @@ function getRefFromEnv(): string {
|
||||
return refEnv;
|
||||
}
|
||||
|
||||
export type ActionName =
|
||||
| "init"
|
||||
| "autobuild"
|
||||
| "finish"
|
||||
| "upload-sarif"
|
||||
| "init-post"
|
||||
| "resolve-environment";
|
||||
export type ActionStatus =
|
||||
| "starting"
|
||||
| "aborted"
|
||||
| "success"
|
||||
| "failure"
|
||||
| "user-error";
|
||||
|
||||
// Any status report may include an array of EventReports associated with it.
|
||||
export interface EventReport {
|
||||
/** An enumerable description of the event. */
|
||||
event: string;
|
||||
/** Time this event started. */
|
||||
started_at: string;
|
||||
/** Time this event ended. */
|
||||
completed_at: string;
|
||||
/** eg: `success`, `failure`, `timeout`, etc. */
|
||||
exit_status?: string;
|
||||
/** If the event is language-specific. */
|
||||
language?: string;
|
||||
/**
|
||||
* A generic JSON blob of data related to this event.
|
||||
* Use Object.assign() to append additional fields to the object.
|
||||
*/
|
||||
properties?: object;
|
||||
}
|
||||
|
||||
export interface StatusReportBase {
|
||||
/**
|
||||
* UUID representing the job run that this status report belongs to. We
|
||||
* generate our own UUID here because Actions currently does not expose a
|
||||
* unique job run identifier. This UUID will allow us to more easily match
|
||||
* reports from different steps in the same workflow job.
|
||||
*
|
||||
* If and when Actions does expose a unique job ID, we plan to populate a
|
||||
* separate int field, `job_run_id`, with the Actions-generated identifier,
|
||||
* as it will allow us to more easily join our telemetry data with Actions
|
||||
* telemetry tables.
|
||||
*/
|
||||
job_run_uuid: string;
|
||||
/** ID of the workflow run containing the action run. */
|
||||
workflow_run_id: number;
|
||||
/** Attempt number of the run containing the action run. */
|
||||
workflow_run_attempt: number;
|
||||
/** Workflow name. Converted to analysis_name further down the pipeline. */
|
||||
workflow_name: string;
|
||||
/** Job name from the workflow. */
|
||||
job_name: string;
|
||||
/** Analysis key, normally composed from the workflow path and job name. */
|
||||
analysis_key: string;
|
||||
/** Value of the matrix for this instantiation of the job. */
|
||||
matrix_vars?: string;
|
||||
/** Commit oid that the workflow was triggered on. */
|
||||
commit_oid: string;
|
||||
/** Ref that the workflow was triggered on. */
|
||||
ref: string;
|
||||
/** Name of the action being executed. */
|
||||
action_name: ActionName;
|
||||
/** Version of the action being executed, as a ref. */
|
||||
action_ref?: string;
|
||||
/** Version of the action being executed, as a commit oid. */
|
||||
action_oid: string;
|
||||
/** Time the first action started. Normally the init action. */
|
||||
started_at: string;
|
||||
/** Time this action started. */
|
||||
action_started_at: string;
|
||||
/** Time this action completed, or undefined if not yet completed. */
|
||||
completed_at?: string;
|
||||
/** State this action is currently in. */
|
||||
status: ActionStatus;
|
||||
/**
|
||||
* Testing environment: Set if non-production environment.
|
||||
* The server accepts one of the following values:
|
||||
* `["", "qa-rc", "qa-rc-1", "qa-rc-2", "qa-experiment-1", "qa-experiment-2", "qa-experiment-3"]`.
|
||||
*/
|
||||
testing_environment: string;
|
||||
/**
|
||||
* Information about the enablement of the ML-powered JS query pack.
|
||||
*
|
||||
* @see {@link util.getMlPoweredJsQueriesStatus}
|
||||
*/
|
||||
ml_powered_javascript_queries?: string;
|
||||
/** Cause of the failure (or undefined if status is not failure). */
|
||||
cause?: string;
|
||||
/** Stack trace of the failure (or undefined if status is not failure). */
|
||||
exception?: string;
|
||||
/** Action runner operating system (context runner.os). */
|
||||
runner_os: string;
|
||||
/** Action runner hardware architecture (context runner.arch). */
|
||||
runner_arch?: string;
|
||||
/** Action runner operating system release (x.y.z from os.release()). */
|
||||
runner_os_release?: string;
|
||||
/** Action version (x.y.z from package.json). */
|
||||
action_version: string;
|
||||
/** CodeQL CLI version (x.y.z from the CLI). */
|
||||
codeql_version?: string;
|
||||
}
|
||||
|
||||
export interface DatabaseCreationTimings {
|
||||
scanned_language_extraction_duration_ms?: number;
|
||||
trap_import_duration_ms?: number;
|
||||
}
|
||||
|
||||
export function getActionsStatus(
|
||||
error?: unknown,
|
||||
otherFailureCause?: string,
|
||||
): ActionStatus {
|
||||
if (error || otherFailureCause) {
|
||||
return error instanceof UserError ? "user-error" : "failure";
|
||||
} else {
|
||||
return "success";
|
||||
}
|
||||
}
|
||||
|
||||
export function getActionVersion(): string {
|
||||
return pkg.version!;
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as api from "./api-client";
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as statusReport from "./status-report";
|
||||
import {
|
||||
setupTests,
|
||||
setupActionsVars,
|
||||
@@ -27,9 +28,9 @@ test("analyze action with RAM & threads from environment variables", async (t) =
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["GITHUB_API_URL"] = "https://api.github.com";
|
||||
sinon
|
||||
.stub(api, "createStatusReportBase")
|
||||
.resolves({} as actionsUtil.StatusReportBase);
|
||||
sinon.stub(api, "sendStatusReport").resolves(true);
|
||||
.stub(statusReport, "createStatusReportBase")
|
||||
.resolves({} as statusReport.StatusReportBase);
|
||||
sinon.stub(statusReport, "sendStatusReport").resolves(true);
|
||||
sinon.stub(actionsUtil, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
|
||||
@@ -5,6 +5,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import * as analyze from "./analyze";
|
||||
import * as api from "./api-client";
|
||||
import * as configUtils from "./config-utils";
|
||||
import * as statusReport from "./status-report";
|
||||
import {
|
||||
setupTests,
|
||||
setupActionsVars,
|
||||
@@ -27,9 +28,9 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["GITHUB_API_URL"] = "https://api.github.com";
|
||||
sinon
|
||||
.stub(api, "createStatusReportBase")
|
||||
.resolves({} as actionsUtil.StatusReportBase);
|
||||
sinon.stub(api, "sendStatusReport").resolves(true);
|
||||
.stub(statusReport, "createStatusReportBase")
|
||||
.resolves({} as statusReport.StatusReportBase);
|
||||
sinon.stub(statusReport, "sendStatusReport").resolves(true);
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
};
|
||||
|
||||
@@ -5,7 +5,6 @@ import { performance } from "perf_hooks";
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { DatabaseCreationTimings } from "./actions-util";
|
||||
import {
|
||||
CodeQLAnalysisError,
|
||||
dbIsFinalized,
|
||||
@@ -15,7 +14,6 @@ import {
|
||||
runQueries,
|
||||
} from "./analyze";
|
||||
import { getApiDetails, getGitHubVersion } from "./api-client";
|
||||
import * as api from "./api-client";
|
||||
import { runAutobuild } from "./autobuild";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config, getConfig, getMlPoweredJsQueriesStatus } from "./config-utils";
|
||||
@@ -25,6 +23,13 @@ import { Feature, Features } from "./feature-flags";
|
||||
import { Language } from "./languages";
|
||||
import { getActionsLogger, Logger } from "./logging";
|
||||
import { parseRepositoryNwo } from "./repository";
|
||||
import * as statusReport from "./status-report";
|
||||
import {
|
||||
createStatusReportBase,
|
||||
DatabaseCreationTimings,
|
||||
getActionsStatus,
|
||||
StatusReportBase,
|
||||
} from "./status-report";
|
||||
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
@@ -36,8 +41,8 @@ interface AnalysisStatusReport
|
||||
QueriesStatusReport {}
|
||||
|
||||
interface FinishStatusReport
|
||||
extends actionsUtil.StatusReportBase,
|
||||
actionsUtil.DatabaseCreationTimings,
|
||||
extends StatusReportBase,
|
||||
DatabaseCreationTimings,
|
||||
AnalysisStatusReport {}
|
||||
|
||||
interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
|
||||
@@ -47,7 +52,7 @@ interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
|
||||
trap_cache_upload_duration_ms: number;
|
||||
}
|
||||
|
||||
export async function sendStatusReport(
|
||||
async function sendStatusReport(
|
||||
startedAt: Date,
|
||||
config: Config | undefined,
|
||||
stats: AnalysisStatusReport | undefined,
|
||||
@@ -57,18 +62,16 @@ export async function sendStatusReport(
|
||||
didUploadTrapCaches: boolean,
|
||||
logger: Logger,
|
||||
) {
|
||||
const status = actionsUtil.getActionsStatus(
|
||||
error,
|
||||
stats?.analyze_failure_language,
|
||||
);
|
||||
const statusReportBase = await api.createStatusReportBase(
|
||||
const status = getActionsStatus(error, stats?.analyze_failure_language);
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"finish",
|
||||
status,
|
||||
startedAt,
|
||||
await util.checkDiskUsage(),
|
||||
error?.message,
|
||||
error?.stack,
|
||||
);
|
||||
const statusReport: FinishStatusReport = {
|
||||
const report: FinishStatusReport = {
|
||||
...statusReportBase,
|
||||
...(config
|
||||
? {
|
||||
@@ -80,15 +83,15 @@ export async function sendStatusReport(
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
|
||||
...statusReport,
|
||||
...report,
|
||||
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
|
||||
trap_cache_upload_size_bytes: Math.round(
|
||||
await getTotalCacheSize(config.trapCaches, logger),
|
||||
),
|
||||
};
|
||||
await api.sendStatusReport(trapCacheUploadStatusReport);
|
||||
await statusReport.sendStatusReport(trapCacheUploadStatusReport);
|
||||
} else {
|
||||
await api.sendStatusReport(statusReport);
|
||||
await statusReport.sendStatusReport(report);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -181,8 +184,13 @@ async function run() {
|
||||
const logger = getActionsLogger();
|
||||
try {
|
||||
if (
|
||||
!(await api.sendStatusReport(
|
||||
await api.createStatusReportBase("finish", "starting", startedAt),
|
||||
!(await statusReport.sendStatusReport(
|
||||
await createStatusReportBase(
|
||||
"finish",
|
||||
"starting",
|
||||
startedAt,
|
||||
await util.checkDiskUsage(logger),
|
||||
),
|
||||
))
|
||||
) {
|
||||
return;
|
||||
|
||||
@@ -6,7 +6,6 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||
import del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import { DatabaseCreationTimings, EventReport } from "./actions-util";
|
||||
import * as analysisPaths from "./analysis-paths";
|
||||
import { CodeQL, getCodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
@@ -18,6 +17,7 @@ import {
|
||||
} from "./feature-flags";
|
||||
import { isScannedLanguage, Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import { DatabaseCreationTimings, EventReport } from "./status-report";
|
||||
import { endTracingForCluster } from "./tracer-config";
|
||||
import { validateSarifFileSchema } from "./upload-lib";
|
||||
import * as util from "./util";
|
||||
@@ -389,7 +389,7 @@ export async function runQueries(
|
||||
}
|
||||
|
||||
if (
|
||||
!(await features.getValue(Feature.NewAnalysisSummaryEnabled, codeql))
|
||||
!(await features.getValue(Feature.AnalysisSummaryV2Enabled, codeql))
|
||||
) {
|
||||
await runPrintLinesOfCode(language);
|
||||
}
|
||||
|
||||
@@ -1,31 +1,14 @@
|
||||
import * as os from "os";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
import * as githubUtils from "@actions/github/lib/utils";
|
||||
import * as retry from "@octokit/plugin-retry";
|
||||
import consoleLogLevel from "console-log-level";
|
||||
|
||||
import { getActionVersion, getRequiredInput } from "./actions-util";
|
||||
import {
|
||||
ActionName,
|
||||
ActionStatus,
|
||||
StatusReportBase,
|
||||
getActionVersion,
|
||||
getOptionalInput,
|
||||
getRef,
|
||||
getRequiredInput,
|
||||
getWorkflowEventName,
|
||||
getWorkflowRunAttempt,
|
||||
getWorkflowRunID,
|
||||
} from "./actions-util";
|
||||
import { EnvVar } from "./environment";
|
||||
import {
|
||||
getCachedCodeQlVersion,
|
||||
getRequiredEnvParam,
|
||||
GITHUB_DOTCOM_URL,
|
||||
GitHubVariant,
|
||||
GitHubVersion,
|
||||
isHTTPError,
|
||||
isInTestMode,
|
||||
parseGitHubUrl,
|
||||
parseMatrixInput,
|
||||
} from "./util";
|
||||
@@ -136,187 +119,6 @@ export async function getGitHubVersion(): Promise<GitHubVersion> {
|
||||
return cachedGitHubVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compose a StatusReport.
|
||||
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param startedAt The time this action started executing.
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
export async function createStatusReportBase(
|
||||
actionName: ActionName,
|
||||
status: ActionStatus,
|
||||
actionStartedAt: Date,
|
||||
cause?: string,
|
||||
exception?: string,
|
||||
): Promise<StatusReportBase> {
|
||||
const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
|
||||
const ref = await getRef();
|
||||
const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
|
||||
const workflowRunID = getWorkflowRunID();
|
||||
const workflowRunAttempt = getWorkflowRunAttempt();
|
||||
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
|
||||
const jobName = process.env["GITHUB_JOB"] || "";
|
||||
const analysis_key = await getAnalysisKey();
|
||||
let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
|
||||
if (workflowStartedAt === undefined) {
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
const runnerOs = getRequiredEnvParam("RUNNER_OS");
|
||||
const codeQlCliVersion = getCachedCodeQlVersion();
|
||||
const actionRef = process.env["GITHUB_ACTION_REF"];
|
||||
const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
|
||||
// re-export the testing environment variable so that it is available to subsequent steps,
|
||||
// even if it was only set for this step
|
||||
if (testingEnvironment !== "") {
|
||||
core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
|
||||
}
|
||||
|
||||
const statusReport: StatusReportBase = {
|
||||
job_run_uuid: jobRunUUID,
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_run_attempt: workflowRunAttempt,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
analysis_key,
|
||||
commit_oid: commitOid,
|
||||
ref,
|
||||
action_name: actionName,
|
||||
action_ref: actionRef,
|
||||
action_oid: "unknown", // TODO decide if it's possible to fill this in
|
||||
started_at: workflowStartedAt,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
status,
|
||||
testing_environment: testingEnvironment,
|
||||
runner_os: runnerOs,
|
||||
action_version: getActionVersion(),
|
||||
};
|
||||
|
||||
// Add optional parameters
|
||||
if (cause) {
|
||||
statusReport.cause = cause;
|
||||
}
|
||||
if (exception) {
|
||||
statusReport.exception = exception;
|
||||
}
|
||||
if (
|
||||
status === "success" ||
|
||||
status === "failure" ||
|
||||
status === "aborted" ||
|
||||
status === "user-error"
|
||||
) {
|
||||
statusReport.completed_at = new Date().toISOString();
|
||||
}
|
||||
const matrix = getRequiredInput("matrix");
|
||||
if (matrix) {
|
||||
statusReport.matrix_vars = matrix;
|
||||
}
|
||||
if ("RUNNER_ARCH" in process.env) {
|
||||
// RUNNER_ARCH is available only in GHES 3.4 and later
|
||||
// Values other than X86, X64, ARM, or ARM64 are discarded server side
|
||||
statusReport.runner_arch = process.env["RUNNER_ARCH"];
|
||||
}
|
||||
if (runnerOs === "Windows" || runnerOs === "macOS") {
|
||||
statusReport.runner_os_release = os.release();
|
||||
}
|
||||
if (codeQlCliVersion !== undefined) {
|
||||
statusReport.codeql_version = codeQlCliVersion;
|
||||
}
|
||||
|
||||
return statusReport;
|
||||
}
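A minimal usage sketch, assuming the functions defined in this file are in scope; the action name and status values are just examples of the enumerated types above.

```ts
async function reportStarting(): Promise<boolean> {
  const startedAt = new Date();
  const statusReportBase = await createStatusReportBase(
    "init",     // ActionName
    "starting", // ActionStatus
    startedAt,
  );
  // Returns false if the API rejected the report in a way that should stop the run.
  return sendStatusReport(statusReportBase);
}
```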
|
||||
|
||||
const GENERIC_403_MSG =
|
||||
"The repo on which this action is running is not opted-in to CodeQL code scanning.";
|
||||
const GENERIC_404_MSG =
|
||||
"Not authorized to use the CodeQL code scanning feature on this repo.";
|
||||
const OUT_OF_DATE_MSG =
|
||||
"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||
const INCOMPATIBLE_MSG =
|
||||
"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Optionally checks the response from the API endpoint and sets the action
|
||||
* as failed if the status report failed. This is only expected to be used
|
||||
* when sending a 'starting' report.
|
||||
*
|
||||
* Returns whether sending the status report was successful or not.
|
||||
*/
|
||||
export async function sendStatusReport<S extends StatusReportBase>(
|
||||
statusReport: S,
|
||||
): Promise<boolean> {
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug(`Sending status report: ${statusReportJSON}`);
|
||||
// If in test mode we don't want to upload the results
|
||||
if (isInTestMode()) {
|
||||
core.debug("In test mode. Status reports are not uploaded.");
|
||||
return true;
|
||||
}
|
||||
|
||||
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
|
||||
const [owner, repo] = nwo.split("/");
|
||||
const client = getApiClient();
|
||||
|
||||
try {
|
||||
await client.request(
|
||||
"PUT /repos/:owner/:repo/code-scanning/analysis/status",
|
||||
{
|
||||
owner,
|
||||
repo,
|
||||
data: statusReportJSON,
|
||||
},
|
||||
);
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
if (isHTTPError(e)) {
|
||||
switch (e.status) {
|
||||
case 403:
|
||||
if (
|
||||
getWorkflowEventName() === "push" &&
|
||||
process.env["GITHUB_ACTOR"] === "dependabot[bot]"
|
||||
) {
|
||||
core.setFailed(
|
||||
'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
||||
"Uploading Code Scanning results requires write access. " +
|
||||
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||
"See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.",
|
||||
);
|
||||
} else {
|
||||
core.setFailed(e.message || GENERIC_403_MSG);
|
||||
}
|
||||
return false;
|
||||
case 404:
|
||||
core.setFailed(GENERIC_404_MSG);
|
||||
return false;
|
||||
case 422:
|
||||
// schema incompatibility when reporting status
|
||||
// this means that this action version is no longer compatible with the API
|
||||
// we still want to continue as it is likely the analysis endpoint will work
|
||||
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
|
||||
core.debug(INCOMPATIBLE_MSG);
|
||||
} else {
|
||||
core.debug(OUT_OF_DATE_MSG);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// something else has gone wrong and the request/response will be logged by octokit
|
||||
// it's possible this is a transient error and we should continue scanning
|
||||
core.error(
|
||||
"An unexpected error occurred when sending code scanning status report.",
|
||||
);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path of the currently executing workflow relative to the repository root.
|
||||
*/
|
||||
|
||||
@@ -1,23 +1,24 @@
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import {
|
||||
getActionsStatus,
|
||||
getActionVersion,
|
||||
getOptionalInput,
|
||||
getTemporaryDirectory,
|
||||
StatusReportBase,
|
||||
} from "./actions-util";
|
||||
import {
|
||||
createStatusReportBase,
|
||||
getGitHubVersion,
|
||||
sendStatusReport,
|
||||
} from "./api-client";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import { determineAutobuildLanguages, runAutobuild } from "./autobuild";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Language } from "./languages";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { Logger, getActionsLogger } from "./logging";
|
||||
import {
|
||||
StatusReportBase,
|
||||
getActionsStatus,
|
||||
createStatusReportBase,
|
||||
sendStatusReport,
|
||||
} from "./status-report";
|
||||
import {
|
||||
checkDiskUsage,
|
||||
checkGitHubVersionInRange,
|
||||
initializeEnvironment,
|
||||
wrapError,
|
||||
@@ -31,6 +32,7 @@ interface AutobuildStatusReport extends StatusReportBase {
|
||||
}
|
||||
|
||||
async function sendCompletedStatusReport(
|
||||
logger: Logger,
|
||||
startedAt: Date,
|
||||
allLanguages: string[],
|
||||
failingLanguage?: string,
|
||||
@@ -43,6 +45,7 @@ async function sendCompletedStatusReport(
|
||||
"autobuild",
|
||||
status,
|
||||
startedAt,
|
||||
await checkDiskUsage(logger),
|
||||
cause?.message,
|
||||
cause?.stack,
|
||||
);
|
||||
@@ -62,7 +65,12 @@ async function run() {
|
||||
try {
|
||||
if (
|
||||
!(await sendStatusReport(
|
||||
await createStatusReportBase("autobuild", "starting", startedAt),
|
||||
await createStatusReportBase(
|
||||
"autobuild",
|
||||
"starting",
|
||||
startedAt,
|
||||
await checkDiskUsage(logger),
|
||||
),
|
||||
))
|
||||
) {
|
||||
return;
|
||||
@@ -101,6 +109,7 @@ async function run() {
|
||||
`We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. ${error.message}`,
|
||||
);
|
||||
await sendCompletedStatusReport(
|
||||
logger,
|
||||
startedAt,
|
||||
languages ?? [],
|
||||
currentLanguage,
|
||||
@@ -109,7 +118,7 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
|
||||
await sendCompletedStatusReport(startedAt, languages ?? []);
|
||||
await sendCompletedStatusReport(logger, startedAt, languages ?? []);
|
||||
}
|
||||
|
||||
async function runWrapper() {
|
||||
|
||||
@@ -165,6 +165,31 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
|
||||
});
|
||||
});
|
||||
|
||||
test("caches semantically versioned bundles using their semantic version number", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const url = mockBundleDownloadApi({
|
||||
tagName: `codeql-bundle-v2.14.0`,
|
||||
isPinned: false,
|
||||
});
|
||||
const result = await codeql.setupCodeQL(
|
||||
url,
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
|
||||
t.is(toolcache.findAllVersions("CodeQL").length, 1);
|
||||
t.assert(toolcache.find("CodeQL", `2.14.0`));
|
||||
t.is(result.toolsVersion, `2.14.0`);
|
||||
t.is(result.toolsSource, ToolsSource.Download);
|
||||
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||
});
|
||||
});
|
||||
|
||||
test("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
@@ -1112,7 +1137,7 @@ for (const {
|
||||
"-v",
|
||||
"",
|
||||
stubConfig,
|
||||
createFeatures(featureEnabled ? [Feature.NewAnalysisSummaryEnabled] : []),
|
||||
createFeatures(featureEnabled ? [Feature.AnalysisSummaryV2Enabled] : []),
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
t.is(
|
||||
|
||||
@@ -11,7 +11,7 @@ import type { Config } from "./config-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import {
|
||||
CODEQL_VERSION_INTRA_LAYER_PARALLELISM,
|
||||
CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
|
||||
CODEQL_VERSION_ANALYSIS_SUMMARY_V2,
|
||||
CodeQLDefaultVersionInfo,
|
||||
Feature,
|
||||
FeatureEnablement,
|
||||
@@ -574,6 +574,13 @@ export async function getCodeQLForCmd(
|
||||
) {
|
||||
extraArgs.push(`--qlconfig-file=${qlconfigFile}`);
|
||||
}
|
||||
|
||||
if (
|
||||
await features.getValue(Feature.LanguageBaselineConfigEnabled, this)
|
||||
) {
|
||||
extraArgs.push("--calculate-language-specific-baseline");
|
||||
}
|
||||
|
||||
await runTool(
|
||||
cmd,
|
||||
[
|
||||
@@ -856,10 +863,10 @@ export async function getCodeQLForCmd(
|
||||
} else if (await util.codeQlVersionAbove(this, "2.12.4")) {
|
||||
codeqlArgs.push("--no-sarif-include-diagnostics");
|
||||
}
|
||||
if (await features.getValue(Feature.NewAnalysisSummaryEnabled, this)) {
|
||||
if (await features.getValue(Feature.AnalysisSummaryV2Enabled, this)) {
|
||||
codeqlArgs.push("--new-analysis-summary");
|
||||
} else if (
|
||||
await util.codeQlVersionAbove(this, CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)
|
||||
await util.codeQlVersionAbove(this, CODEQL_VERSION_ANALYSIS_SUMMARY_V2)
|
||||
) {
|
||||
codeqlArgs.push("--no-new-analysis-summary");
|
||||
}
|
||||
@@ -1162,6 +1169,7 @@ async function runTool(
|
||||
) {
|
||||
let output = "";
|
||||
let error = "";
|
||||
process.stdout.write(`[command]${cmd} ${args.join(" ")}\n`);
|
||||
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
||||
ignoreReturnCode: true,
|
||||
listeners: {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-v2.14.1",
|
||||
"cliVersion": "2.14.1",
|
||||
"priorBundleVersion": "codeql-bundle-v2.14.0",
|
||||
"priorCliVersion": "2.14.0"
|
||||
"bundleVersion": "codeql-bundle-v2.14.2",
|
||||
"cliVersion": "2.14.2",
|
||||
"priorBundleVersion": "codeql-bundle-v2.14.1",
|
||||
"priorCliVersion": "2.14.1"
|
||||
}
|
||||
|
||||
@@ -1,41 +1,37 @@
|
||||
export enum EnvVar {
|
||||
/** Set to true when the `analyze` Action completes successfully. */
|
||||
/** Whether the `analyze` Action completes successfully. */
|
||||
ANALYZE_DID_COMPLETE_SUCCESSFULLY = "CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY",
|
||||
|
||||
/** Set to "true" when the CodeQL Action has invoked the Go autobuilder. */
|
||||
/** Whether the CodeQL Action has invoked the Go autobuilder. */
|
||||
DID_AUTOBUILD_GOLANG = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG",
|
||||
|
||||
/**
|
||||
* Used to disable the SARIF post-processing in the Action that removes duplicate locations from
|
||||
* Whether to disable the SARIF post-processing in the Action that removes duplicate locations from
|
||||
* notifications in the `run[].invocations[].toolExecutionNotifications` SARIF property.
|
||||
*/
|
||||
DISABLE_DUPLICATE_LOCATION_FIX = "CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX",
|
||||
|
||||
/**
|
||||
* If set to the "true" string, then the CodeQL Action is using its
|
||||
* own deprecated and non-standard way of scanning for multiple
|
||||
* languages.
|
||||
* Whether the CodeQL Action is using its own deprecated and non-standard way of scanning for
|
||||
* multiple languages.
|
||||
*/
|
||||
FEATURE_MULTI_LANGUAGE = "CODEQL_ACTION_FEATURE_MULTI_LANGUAGE",
|
||||
|
||||
/**
|
||||
* If set to the "true" string, then the CodeQL Action is using its
|
||||
* own sandwiched workflow mechanism.
|
||||
*/
|
||||
/** Whether the CodeQL Action is using its own sandwiched workflow mechanism. */
|
||||
FEATURE_SANDWICH = "CODEQL_ACTION_FEATURE_SANDWICH",
|
||||
|
||||
/**
|
||||
* If set to a truthy value, then the CodeQL Action might combine SARIF
|
||||
* output from several `interpret-results` runs for the same language.
|
||||
* Whether the CodeQL Action might combine SARIF output from several `interpret-results` runs for
|
||||
* the same language.
|
||||
*/
|
||||
FEATURE_SARIF_COMBINE = "CODEQL_ACTION_FEATURE_SARIF_COMBINE",
|
||||
|
||||
/**
|
||||
* If set to the "true" string, then the CodeQL Action will upload SARIF,
|
||||
* not the CLI.
|
||||
*/
|
||||
/** Whether the CodeQL Action will upload SARIF, not the CLI. */
|
||||
FEATURE_WILL_UPLOAD = "CODEQL_ACTION_FEATURE_WILL_UPLOAD",
|
||||
|
||||
/** Whether the CodeQL Action has already warned the user about low disk space. */
|
||||
HAS_WARNED_ABOUT_DISK_SPACE = "CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE",
|
||||
|
||||
/** UUID representing the current job run. */
|
||||
JOB_RUN_UUID = "JOB_RUN_UUID",
|
||||
|
||||
@@ -44,7 +40,7 @@ export enum EnvVar {
|
||||
/** Whether to suppress the warning if the current CLI will soon be unsupported. */
|
||||
SUPPRESS_DEPRECATED_SOON_WARNING = "CODEQL_ACTION_SUPPRESS_DEPRECATED_SOON_WARNING",
|
||||
|
||||
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
||||
/** Whether to disable uploading SARIF results or status reports to the GitHub API */
|
||||
TEST_MODE = "CODEQL_ACTION_TEST_MODE",
|
||||
|
||||
TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT",
|
||||
|
||||
@@ -21,13 +21,18 @@ export const CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = "2.13.4";
|
||||
/**
|
||||
* Versions 2.14.0+ of the CodeQL CLI support new analysis summaries.
|
||||
*/
|
||||
export const CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = "2.14.0";
|
||||
export const CODEQL_VERSION_ANALYSIS_SUMMARY_V2 = "2.14.0";
|
||||
|
||||
/**
|
||||
* Versions 2.14.0+ of the CodeQL CLI support intra-layer parallelism (aka fine-grained parallelism) options.
|
||||
*/
|
||||
export const CODEQL_VERSION_INTRA_LAYER_PARALLELISM = "2.14.0";
|
||||
|
||||
/**
|
||||
* Versions 2.14.2+ of the CodeQL CLI support language-specific baseline configuration.
|
||||
*/
|
||||
export const CODEQL_VERSION_LANGUAGE_BASELINE_CONFIG = "2.14.2";
|
||||
|
||||
export interface CodeQLDefaultVersionInfo {
|
||||
cliVersion: string;
|
||||
tagName: string;
|
||||
@@ -48,14 +53,15 @@ export interface FeatureEnablement {
|
||||
* Each value of this enum should end with `_enabled`.
|
||||
*/
|
||||
export enum Feature {
|
||||
AnalysisSummaryV2Enabled = "analysis_summary_v2_enabled",
|
||||
CliConfigFileEnabled = "cli_config_file_enabled",
|
||||
CodeqlJavaLombokEnabled = "codeql_java_lombok_enabled",
|
||||
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
|
||||
DisablePythonDependencyInstallationEnabled = "disable_python_dependency_installation_enabled",
|
||||
EvaluatorIntraLayerParallelismEnabled = "evaluator_intra_layer_parallelism_enabled",
|
||||
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
|
||||
LanguageBaselineConfigEnabled = "language_baseline_config_enabled",
|
||||
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
|
||||
NewAnalysisSummaryEnabled = "new_analysis_summary_enabled",
|
||||
QaTelemetryEnabled = "qa_telemetry_enabled",
|
||||
ScalingReservedRamEnabled = "scaling_reserved_ram_enabled",
|
||||
UploadFailedSarifEnabled = "upload_failed_sarif_enabled",
|
||||
@@ -65,6 +71,11 @@ export const featureConfig: Record<
|
||||
Feature,
|
||||
{ envVar: string; minimumVersion: string | undefined; defaultValue: boolean }
|
||||
> = {
|
||||
[Feature.AnalysisSummaryV2Enabled]: {
|
||||
envVar: "CODEQL_ACTION_ANALYSIS_SUMMARY_V2",
|
||||
minimumVersion: CODEQL_VERSION_ANALYSIS_SUMMARY_V2,
|
||||
defaultValue: false,
|
||||
},
|
||||
[Feature.CodeqlJavaLombokEnabled]: {
|
||||
envVar: "CODEQL_JAVA_LOMBOK",
|
||||
minimumVersion: "2.14.0",
|
||||
@@ -90,16 +101,16 @@ export const featureConfig: Record<
|
||||
minimumVersion: "2.12.4",
|
||||
defaultValue: true,
|
||||
},
|
||||
[Feature.LanguageBaselineConfigEnabled]: {
|
||||
envVar: "CODEQL_ACTION_LANGUAGE_BASELINE_CONFIG",
|
||||
minimumVersion: CODEQL_VERSION_LANGUAGE_BASELINE_CONFIG,
|
||||
defaultValue: false,
|
||||
},
|
||||
[Feature.MlPoweredQueriesEnabled]: {
|
||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||
minimumVersion: undefined,
|
||||
defaultValue: false,
|
||||
},
|
||||
[Feature.NewAnalysisSummaryEnabled]: {
|
||||
envVar: "CODEQL_ACTION_NEW_ANALYSIS_SUMMARY",
|
||||
minimumVersion: CODEQL_VERSION_NEW_ANALYSIS_SUMMARY,
|
||||
defaultValue: false,
|
||||
},
|
||||
[Feature.QaTelemetryEnabled]: {
|
||||
envVar: "CODEQL_ACTION_QA_TELEMETRY",
|
||||
minimumVersion: undefined,
|
||||
|
||||
@@ -6,23 +6,21 @@

import * as core from "@actions/core";

import {
  getActionsStatus,
  getTemporaryDirectory,
  printDebugLogs,
  StatusReportBase,
} from "./actions-util";
import {
  createStatusReportBase,
  getGitHubVersion,
  sendStatusReport,
} from "./api-client";
import { getTemporaryDirectory, printDebugLogs } from "./actions-util";
import { getGitHubVersion } from "./api-client";
import * as debugArtifacts from "./debug-artifacts";
import { Features } from "./feature-flags";
import * as initActionPostHelper from "./init-action-post-helper";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import {
  StatusReportBase,
  sendStatusReport,
  createStatusReportBase,
  getActionsStatus,
} from "./status-report";
import {
  checkDiskUsage,
  checkGitHubVersionInRange,
  getRequiredEnvParam,
  wrapError,
@@ -69,6 +67,7 @@ async function runWrapper() {
        "init-post",
        getActionsStatus(error),
        startedAt,
        await checkDiskUsage(),
        error.message,
        error.stack,
      ),
@@ -79,6 +78,7 @@ async function runWrapper() {
      "init-post",
      "success",
      startedAt,
      await checkDiskUsage(),
    );
    const statusReport: InitPostStatusReport = {
      ...statusReportBase,
src/init-action.ts
@@ -4,18 +4,12 @@ import * as core from "@actions/core";
import { v4 as uuidV4 } from "uuid";

import {
  getActionsStatus,
  getActionVersion,
  getOptionalInput,
  getRequiredInput,
  getTemporaryDirectory,
  StatusReportBase,
} from "./actions-util";
import {
  createStatusReportBase,
  getGitHubVersion,
  sendStatusReport,
} from "./api-client";
import { getGitHubVersion } from "./api-client";
import { CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { getMlPoweredJsQueriesStatus } from "./config-utils";
@@ -26,8 +20,15 @@ import { Language } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
  StatusReportBase,
  createStatusReportBase,
  getActionsStatus,
  sendStatusReport,
} from "./status-report";
import { getTotalCacheSize } from "./trap-caching";
import {
  checkDiskUsage,
  checkForTimeout,
  checkGitHubVersionInRange,
  DEFAULT_DEBUG_ARTIFACT_NAME,
@@ -102,6 +103,7 @@ async function sendCompletedStatusReport(
    "init",
    getActionsStatus(error),
    startedAt,
    await checkDiskUsage(logger),
    error?.message,
    error?.stack,
  );
@@ -222,6 +224,7 @@ async function run() {
        "init",
        "starting",
        startedAt,
        await checkDiskUsage(logger),
        workflowErrors,
      ),
    ))
@@ -302,6 +305,7 @@ async function run() {
        "init",
        error instanceof UserError ? "user-error" : "aborted",
        startedAt,
        await checkDiskUsage(),
        error.message,
        error.stack,
      ),
src/resolve-environment-action.ts
@@ -1,22 +1,27 @@
import * as core from "@actions/core";

import {
  getActionsStatus,
  getOptionalInput,
  getRequiredInput,
  getTemporaryDirectory,
} from "./actions-util";
import {
  createStatusReportBase,
  getGitHubVersion,
  sendStatusReport,
} from "./api-client";
import { getGitHubVersion } from "./api-client";
import { CommandInvocationError } from "./codeql";
import * as configUtils from "./config-utils";
import { Language, resolveAlias } from "./languages";
import { getActionsLogger } from "./logging";
import { runResolveBuildEnvironment } from "./resolve-environment";
import { checkForTimeout, checkGitHubVersionInRange, wrapError } from "./util";
import {
  sendStatusReport,
  createStatusReportBase,
  getActionsStatus,
} from "./status-report";
import {
  checkDiskUsage,
  checkForTimeout,
  checkGitHubVersionInRange,
  wrapError,
} from "./util";

const ACTION_NAME = "resolve-environment";
const ENVIRONMENT_OUTPUT_NAME = "environment";
@@ -29,7 +34,12 @@ async function run() {
  try {
    if (
      !(await sendStatusReport(
        await createStatusReportBase(ACTION_NAME, "starting", startedAt),
        await createStatusReportBase(
          ACTION_NAME,
          "starting",
          startedAt,
          await checkDiskUsage(logger),
        ),
      ))
    ) {
      return;
@@ -74,6 +84,7 @@ async function run() {
        ACTION_NAME,
        getActionsStatus(error),
        startedAt,
        await checkDiskUsage(),
        error.message,
        error.stack,
      ),
@@ -84,7 +95,12 @@ async function run() {
  }

  await sendStatusReport(
    await createStatusReportBase(ACTION_NAME, "success", startedAt),
    await createStatusReportBase(
      ACTION_NAME,
      "success",
      startedAt,
      await checkDiskUsage(),
    ),
  );
}
src/setup-codeql.ts
@@ -15,7 +15,10 @@ import * as api from "./api-client";
// creation scripts. Ensure that any changes to the format of this file are compatible with both of
// these dependents.
import * as defaults from "./defaults.json";
import { CodeQLDefaultVersionInfo } from "./feature-flags";
import {
  CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED,
  CodeQLDefaultVersionInfo,
} from "./feature-flags";
import { Logger } from "./logging";
import * as util from "./util";
import { isGoodVersion, wrapError } from "./util";
@@ -610,20 +613,12 @@ export async function downloadCodeQL(
    );
  }

  // Include both the CLI version and the bundle version in the toolcache version number. That way
  // if the user requests the same URL again, we can get it from the cache without having to call
  // any of the Releases API.
  //
  // Special case: If the CLI version is a pre-release or contains build metadata, then cache the
  // bundle as `0.0.0-<bundleVersion>` to avoid the bundle being interpreted as containing a stable
  // CLI release. In principle, it should be enough to just check that the CLI version isn't a
  // pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
  // and we don't want these nightlies to override stable CLI versions in the toolcache.
  const toolcacheVersion = maybeCliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
    ? `${maybeCliVersion}-${bundleVersion}`
    : convertToSemVer(bundleVersion, logger);

  logger.debug("Caching CodeQL bundle.");
  const toolcacheVersion = getCanonicalToolcacheVersion(
    maybeCliVersion,
    bundleVersion,
    logger,
  );
  const toolcachedBundlePath = await toolcache.cacheDir(
    extractedBundlePath,
    "CodeQL",
@@ -656,6 +651,38 @@ export function getCodeQLURLVersion(url: string): string {
  return match[1];
}

/**
 * Returns the toolcache version number to use to store the bundle with the associated CLI version
 * and bundle version.
 *
 * This is the canonical version number, since toolcaches populated by different versions of the
 * CodeQL Action or different runner image creation scripts may store the bundle using a different
 * version number. Functions like `getCodeQLSource` that fetch the bundle from rather than save the
 * bundle to the toolcache should handle these different version numbers.
 */
function getCanonicalToolcacheVersion(
  cliVersion: string | undefined,
  bundleVersion: string,
  logger: Logger,
) {
  // If the CLI version is a pre-release or contains build metadata, then cache the
  // bundle as `0.0.0-<bundleVersion>` to avoid the bundle being interpreted as containing a stable
  // CLI release. In principle, it should be enough to just check that the CLI version isn't a
  // pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
  // and we don't want these nightlies to override stable CLI versions in the toolcache.
  if (!cliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)) {
    return convertToSemVer(bundleVersion, logger);
  }
  // If the bundle is semantically versioned, it can be looked up based on just the CLI version
  // number, so version it in the toolcache using just the CLI version number.
  if (semver.gte(cliVersion, CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED)) {
    return cliVersion;
  }
  // Include both the CLI version and the bundle version in the toolcache version number. That way
  // we can find the bundle in the toolcache based on either the CLI version or the bundle version.
  return `${cliVersion}-${bundleVersion}`;
}

/**
 * Obtains the CodeQL bundle, installs it in the toolcache if appropriate, and extracts it.
 *

60 src/status-report.test.ts Normal file
@@ -0,0 +1,60 @@
import test from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import { EnvVar } from "./environment";
import { createStatusReportBase } from "./status-report";
import { setupTests, setupActionsVars } from "./testing-utils";
import { withTmpDir } from "./util";

setupTests(test);

test("createStatusReportBase", async (t) => {
  await withTmpDir(async (tmpDir: string) => {
    setupActionsVars(tmpDir, tmpDir);

    process.env["CODEQL_ACTION_ANALYSIS_KEY"] = "analysis-key";
    process.env["GITHUB_REF"] = "refs/heads/main";
    process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld";
    process.env["GITHUB_RUN_ATTEMPT"] = "2";
    process.env["GITHUB_RUN_ID"] = "100";
    process.env["GITHUB_SHA"] = "a".repeat(40);
    process.env["ImageVersion"] = "2023.05.19.1";
    process.env["RUNNER_OS"] = "macOS";

    const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput");
    getRequiredInput.withArgs("matrix").resolves("input/matrix");

    const statusReport = await createStatusReportBase(
      "init",
      "failure",
      new Date("May 19, 2023 05:19:00"),
      { numAvailableBytes: 100, numTotalBytes: 500 },
      "failure cause",
      "exception stack trace",
    );

    t.is(statusReport.action_name, "init");
    t.is(statusReport.action_oid, "unknown");
    t.is(typeof statusReport.action_version, "string");
    t.is(
      statusReport.action_started_at,
      new Date("May 19, 2023 05:19:00").toISOString(),
    );
    t.is(statusReport.analysis_key, "analysis-key");
    t.is(statusReport.cause, "failure cause");
    t.is(statusReport.commit_oid, process.env["GITHUB_SHA"]);
    t.is(statusReport.exception, "exception stack trace");
    t.is(statusReport.job_name, process.env["GITHUB_JOB"] || "");
    t.is(typeof statusReport.job_run_uuid, "string");
    t.is(statusReport.ref, process.env["GITHUB_REF"]);
    t.is(statusReport.runner_available_disk_space_bytes, 100);
    t.is(statusReport.runner_image_version, process.env["ImageVersion"]);
    t.is(statusReport.runner_os, process.env["RUNNER_OS"]);
    t.is(statusReport.started_at, process.env[EnvVar.WORKFLOW_STARTED_AT]!);
    t.is(statusReport.status, "failure");
    t.is(statusReport.workflow_name, process.env["GITHUB_WORKFLOW"] || "");
    t.is(statusReport.workflow_run_attempt, 2);
    t.is(statusReport.workflow_run_id, 100);
  });
});
341 src/status-report.ts Normal file
@@ -0,0 +1,341 @@
import * as os from "os";

import * as core from "@actions/core";

import {
  getWorkflowEventName,
  getOptionalInput,
  getRef,
  getWorkflowRunID,
  getWorkflowRunAttempt,
  getActionVersion,
  getRequiredInput,
} from "./actions-util";
import { getAnalysisKey, getApiClient } from "./api-client";
import { EnvVar } from "./environment";
import {
  UserError,
  isHTTPError,
  getRequiredEnvParam,
  getCachedCodeQlVersion,
  isInTestMode,
  GITHUB_DOTCOM_URL,
  DiskUsage,
} from "./util";

export type ActionName =
  | "autobuild"
  | "finish"
  | "init"
  | "init-post"
  | "resolve-environment"
  | "upload-sarif";

export type ActionStatus =
  | "aborted"
  | "failure"
  | "starting"
  | "success"
  | "user-error";

export interface StatusReportBase {
  /** Name of the action being executed. */
  action_name: ActionName;
  /** Version of the action being executed, as a commit oid. */
  action_oid: string;
  /** Version of the action being executed, as a ref. */
  action_ref?: string;
  /** Time this action started. */
  action_started_at: string;
  /** Action version (x.y.z from package.json). */
  action_version: string;
  /** Analysis key, normally composed from the workflow path and job name. */
  analysis_key: string;
  /** Cause of the failure (or undefined if status is not failure). */
  cause?: string;
  /** CodeQL CLI version (x.y.z from the CLI). */
  codeql_version?: string;
  /** Commit oid that the workflow was triggered on. */
  commit_oid: string;
  /** Time this action completed, or undefined if not yet completed. */
  completed_at?: string;
  /** Stack trace of the failure (or undefined if status is not failure). */
  exception?: string;
  /** Job name from the workflow. */
  job_name: string;
  /**
   * UUID representing the job run that this status report belongs to. We
   * generate our own UUID here because Actions currently does not expose a
   * unique job run identifier. This UUID will allow us to more easily match
   * reports from different steps in the same workflow job.
   *
   * If and when Actions does expose a unique job ID, we plan to populate a
   * separate int field, `job_run_id`, with the Actions-generated identifier,
   * as it will allow us to more easily join our telemetry data with Actions
   * telemetry tables.
   */
  job_run_uuid: string;
  /** Value of the matrix for this instantiation of the job. */
  matrix_vars?: string;
  /**
   * Information about the enablement of the ML-powered JS query pack.
   *
   * @see {@link util.getMlPoweredJsQueriesStatus}
   */
  ml_powered_javascript_queries?: string;
  /** Ref that the workflow was triggered on. */
  ref: string;
  /** Action runner hardware architecture (context runner.arch). */
  runner_arch?: string;
  /** Available disk space on the runner, in bytes. */
  runner_available_disk_space_bytes: number;
  /**
   * Version of the runner image, for workflows running on GitHub-hosted runners. Absent otherwise.
   */
  runner_image_version?: string;
  /** Action runner operating system (context runner.os). */
  runner_os: string;
  /** Action runner operating system release (x.y.z from os.release()). */
  runner_os_release?: string;
  /** Total disk space on the runner, in bytes. */
  runner_total_disk_space_bytes: number;
  /** Time the first action started. Normally the init action. */
  started_at: string;
  /** State this action is currently in. */
  status: ActionStatus;
  /**
   * Testing environment: Set if non-production environment.
   * The server accepts one of the following values:
   * `["", "qa-rc", "qa-rc-1", "qa-rc-2", "qa-experiment-1", "qa-experiment-2", "qa-experiment-3"]`.
   */
  testing_environment: string;
  /** Workflow name. Converted to analysis_name further down the pipeline. */
  workflow_name: string;
  /** Attempt number of the run containing the action run. */
  workflow_run_attempt: number;
  /** ID of the workflow run containing the action run. */
  workflow_run_id: number;
}

export interface DatabaseCreationTimings {
  scanned_language_extraction_duration_ms?: number;
  trap_import_duration_ms?: number;
}

export function getActionsStatus(
  error?: unknown,
  otherFailureCause?: string,
): ActionStatus {
  if (error || otherFailureCause) {
    return error instanceof UserError ? "user-error" : "failure";
  } else {
    return "success";
  }
}

// Any status report may include an array of EventReports associated with it.
export interface EventReport {
  /** Time this event ended. */
  completed_at: string;
  /** An enumerable description of the event. */
  event: string;
  /** eg: `success`, `failure`, `timeout`, etc. */
  exit_status?: string;
  /** If the event is language-specific. */
  language?: string;
  /**
   * A generic JSON blob of data related to this event.
   * Use Object.assign() to append additional fields to the object.
   */
  properties?: object;
  /** Time this event started. */
  started_at: string;
}

/**
 * Compose a StatusReport.
 *
 * @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
 * @param status The status. Must be 'success', 'failure', or 'starting'
 * @param startedAt The time this action started executing.
 * @param cause Cause of failure (only supply if status is 'failure')
 * @param exception Exception (only supply if status is 'failure')
 */
export async function createStatusReportBase(
  actionName: ActionName,
  status: ActionStatus,
  actionStartedAt: Date,
  diskInfo: DiskUsage,
  cause?: string,
  exception?: string,
): Promise<StatusReportBase> {
  const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
  const ref = await getRef();
  const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
  const workflowRunID = getWorkflowRunID();
  const workflowRunAttempt = getWorkflowRunAttempt();
  const workflowName = process.env["GITHUB_WORKFLOW"] || "";
  const jobName = process.env["GITHUB_JOB"] || "";
  const analysis_key = await getAnalysisKey();
  let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
  if (workflowStartedAt === undefined) {
    workflowStartedAt = actionStartedAt.toISOString();
    core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
  }
  const runnerOs = getRequiredEnvParam("RUNNER_OS");
  const codeQlCliVersion = getCachedCodeQlVersion();
  const actionRef = process.env["GITHUB_ACTION_REF"];
  const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
  // re-export the testing environment variable so that it is available to subsequent steps,
  // even if it was only set for this step
  if (testingEnvironment !== "") {
    core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
  }

  const statusReport: StatusReportBase = {
    action_name: actionName,
    action_oid: "unknown", // TODO decide if it's possible to fill this in
    action_ref: actionRef,
    action_started_at: actionStartedAt.toISOString(),
    action_version: getActionVersion(),
    analysis_key,
    commit_oid: commitOid,
    job_name: jobName,
    job_run_uuid: jobRunUUID,
    ref,
    runner_available_disk_space_bytes: diskInfo.numAvailableBytes,
    runner_os: runnerOs,
    runner_total_disk_space_bytes: diskInfo.numTotalBytes,
    started_at: workflowStartedAt,
    status,
    testing_environment: testingEnvironment,
    workflow_name: workflowName,
    workflow_run_attempt: workflowRunAttempt,
    workflow_run_id: workflowRunID,
  };

  // Add optional parameters
  if (cause) {
    statusReport.cause = cause;
  }
  if (exception) {
    statusReport.exception = exception;
  }
  if (
    status === "success" ||
    status === "failure" ||
    status === "aborted" ||
    status === "user-error"
  ) {
    statusReport.completed_at = new Date().toISOString();
  }
  const matrix = getRequiredInput("matrix");
  if (matrix) {
    statusReport.matrix_vars = matrix;
  }
  if ("RUNNER_ARCH" in process.env) {
    // RUNNER_ARCH is available only in GHES 3.4 and later
    // Values other than X86, X64, ARM, or ARM64 are discarded server side
    statusReport.runner_arch = process.env["RUNNER_ARCH"];
  }
  if (runnerOs === "Windows" || runnerOs === "macOS") {
    statusReport.runner_os_release = os.release();
  }
  if (codeQlCliVersion !== undefined) {
    statusReport.codeql_version = codeQlCliVersion;
  }
  const imageVersion = process.env["ImageVersion"];
  if (imageVersion) {
    statusReport.runner_image_version = imageVersion;
  }

  return statusReport;
}

const GENERIC_403_MSG =
  "The repo on which this action is running is not opted-in to CodeQL code scanning.";
const GENERIC_404_MSG =
  "Not authorized to use the CodeQL code scanning feature on this repo.";
const OUT_OF_DATE_MSG =
  "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
const INCOMPATIBLE_MSG =
  "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";

/**
 * Send a status report to the code_scanning/analysis/status endpoint.
 *
 * Optionally checks the response from the API endpoint and sets the action
 * as failed if the status report failed. This is only expected to be used
 * when sending a 'starting' report.
 *
 * Returns whether sending the status report was successful or not.
 */
export async function sendStatusReport<S extends StatusReportBase>(
  statusReport: S,
): Promise<boolean> {
  const statusReportJSON = JSON.stringify(statusReport);
  core.debug(`Sending status report: ${statusReportJSON}`);
  // If in test mode we don't want to upload the results
  if (isInTestMode()) {
    core.debug("In test mode. Status reports are not uploaded.");
    return true;
  }

  const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
  const [owner, repo] = nwo.split("/");
  const client = getApiClient();

  try {
    await client.request(
      "PUT /repos/:owner/:repo/code-scanning/analysis/status",
      {
        owner,
        repo,
        data: statusReportJSON,
      },
    );

    return true;
  } catch (e) {
    console.log(e);
    if (isHTTPError(e)) {
      switch (e.status) {
        case 403:
          if (
            getWorkflowEventName() === "push" &&
            process.env["GITHUB_ACTOR"] === "dependabot[bot]"
          ) {
            core.setFailed(
              'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
                "Uploading Code Scanning results requires write access. " +
                'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
                "See https://docs.github.com/en/code-security/secure-coding/configuring-code-scanning#scanning-on-push for more information on how to configure these events.",
            );
          } else {
            core.setFailed(e.message || GENERIC_403_MSG);
          }
          return false;
        case 404:
          core.setFailed(GENERIC_404_MSG);
          return false;
        case 422:
          // schema incompatibility when reporting status
          // this means that this action version is no longer compatible with the API
          // we still want to continue as it is likely the analysis endpoint will work
          if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
            core.debug(INCOMPATIBLE_MSG);
          } else {
            core.debug(OUT_OF_DATE_MSG);
          }
          return true;
      }
    }

    // something else has gone wrong and the request/response will be logged by octokit
    // it's possible this is a transient error and we should continue scanning
    core.error(
      "An unexpected error occurred when sending code scanning status report.",
    );
    return true;
  }
}
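The intended calling pattern for the two functions above is visible in the action entry points in this diff: build the base report with createStatusReportBase, spread it into an action-specific report, and pass the result to sendStatusReport. A condensed sketch (the extra field and the function name here are hypothetical):

import { createStatusReportBase, sendStatusReport, StatusReportBase } from "./status-report";
import { checkDiskUsage } from "./util";

// Hypothetical action-specific report extending the base fields.
interface ExampleStatusReport extends StatusReportBase {
  example_duration_ms: number;
}

async function sendExampleStatusReport(
  startedAt: Date,
  durationMs: number,
): Promise<void> {
  const statusReportBase = await createStatusReportBase(
    "upload-sarif",
    "success",
    startedAt,
    await checkDiskUsage(),
  );
  const statusReport: ExampleStatusReport = {
    ...statusReportBase,
    example_duration_ms: durationMs,
  };
  await sendStatusReport(statusReport);
}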
src/upload-sarif-action.ts
@@ -2,11 +2,17 @@ import * as core from "@actions/core";

import * as actionsUtil from "./actions-util";
import { getActionVersion } from "./actions-util";
import { createStatusReportBase, sendStatusReport } from "./api-client";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import {
  createStatusReportBase,
  sendStatusReport,
  StatusReportBase,
  getActionsStatus,
} from "./status-report";
import * as upload_lib from "./upload-lib";
import {
  checkDiskUsage,
  getRequiredEnvParam,
  initializeEnvironment,
  isInTestMode,
@@ -14,7 +20,7 @@ import {
} from "./util";

interface UploadSarifStatusReport
  extends actionsUtil.StatusReportBase,
  extends StatusReportBase,
    upload_lib.UploadStatusReport {}

async function sendSuccessStatusReport(
@@ -25,6 +31,7 @@ async function sendSuccessStatusReport(
    "upload-sarif",
    "success",
    startedAt,
    await checkDiskUsage(),
  );
  const statusReport: UploadSarifStatusReport = {
    ...statusReportBase,
@@ -35,10 +42,16 @@ async function sendSuccessStatusReport(

async function run() {
  const startedAt = new Date();
  const logger = getActionsLogger();
  initializeEnvironment(getActionVersion());
  if (
    !(await sendStatusReport(
      await createStatusReportBase("upload-sarif", "starting", startedAt),
      await createStatusReportBase(
        "upload-sarif",
        "starting",
        startedAt,
        await checkDiskUsage(),
      ),
    ))
  ) {
    return;
@@ -49,7 +62,7 @@ async function run() {
    actionsUtil.getRequiredInput("sarif_file"),
    actionsUtil.getRequiredInput("checkout_path"),
    actionsUtil.getOptionalInput("category"),
    getActionsLogger(),
    logger,
  );
  core.setOutput("sarif-id", uploadResult.sarifID);

@@ -60,7 +73,7 @@ async function run() {
    await upload_lib.waitForProcessing(
      parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")),
      uploadResult.sarifID,
      getActionsLogger(),
      logger,
    );
  }
  await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
@@ -72,8 +85,9 @@ async function run() {
    await sendStatusReport(
      await createStatusReportBase(
        "upload-sarif",
        actionsUtil.getActionsStatus(error),
        getActionsStatus(error),
        startedAt,
        await checkDiskUsage(),
        message,
        error.stack,
      ),
28 src/util.ts
@@ -4,6 +4,7 @@ import * as path from "path";
import { promisify } from "util";

import * as core from "@actions/core";
import checkDiskSpace from "check-disk-space";
import del from "del";
import getFolderSize from "get-folder-size";
import * as semver from "semver";
@@ -844,3 +845,30 @@ export function prettyPrintPack(pack: Pack) {
    pack.path ? `:${pack.path}` : ""
  }`;
}

export interface DiskUsage {
  numAvailableBytes: number;
  numTotalBytes: number;
}

export async function checkDiskUsage(logger?: Logger): Promise<DiskUsage> {
  const diskUsage = await checkDiskSpace(
    getRequiredEnvParam("GITHUB_WORKSPACE"),
  );
  const gbInBytes = 1024 * 1024 * 1024;
  if (logger && diskUsage.free < 2 * gbInBytes) {
    const message =
      "The Actions runner is running low on disk space " +
      `(${(diskUsage.free / gbInBytes).toPrecision(4)} GB available).`;
    if (process.env[EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
      logger.warning(message);
    } else {
      logger.debug(message);
    }
    core.exportVariable(EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
  }
  return {
    numAvailableBytes: diskUsage.free,
    numTotalBytes: diskUsage.size,
  };
}
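A minimal usage sketch for checkDiskUsage as defined above: passing a logger opts in to the low-disk-space warning (emitted once per workflow via HAS_WARNED_ABOUT_DISK_SPACE, with a 2 GB threshold), while omitting it just measures. The surrounding function is illustrative:

import { getActionsLogger } from "./logging";
import { checkDiskUsage } from "./util";

// Illustrative caller; assumes GITHUB_WORKSPACE is set, as it is on Actions runners.
async function logRunnerDiskUsage(): Promise<void> {
  const logger = getActionsLogger();
  // With a logger, free space below 2 GB triggers the one-time warning above.
  const { numAvailableBytes, numTotalBytes } = await checkDiskUsage(logger);
  logger.info(
    `Runner disk: ${numAvailableBytes} of ${numTotalBytes} bytes available.`,
  );
}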