Compare commits

...

34 Commits

Author SHA1 Message Date
Edoardo Pirovano
e095058bfa Merge pull request #828 from github/update-v1.0.24-0b242db7
Merge main into v1
2021-11-23 10:58:34 +00:00
Edoardo Pirovano
2c99f99c4a Merge branch 'v1' into update-v1.0.24-0b242db7 2021-11-23 09:54:57 +00:00
github-actions[bot]
bcd7e6896f 1.0.24 2021-11-23 09:52:25 +00:00
Edoardo Pirovano
0b242db78f Merge pull request #827 from github/2.7.2-release
Bump default CodeQL version to 2.7.2
2021-11-22 15:17:04 +00:00
Edoardo Pirovano
c897659213 Add CHANGELOG note for new bundle version 2021-11-22 13:11:20 +00:00
Edoardo Pirovano
8b902e1723 Bump default CodeQL version to 2.7.2 2021-11-22 13:09:42 +00:00
Edoardo Pirovano
26567f6a49 Merge pull request #819 from github/mergeback/v1.0.23-to-main-a627e9fa
Mergeback v1.0.23 refs/heads/v1 into main
2021-11-20 00:30:04 +00:00
github-actions[bot]
dbf7ac4b37 Update checked-in dependencies 2021-11-19 23:52:01 +00:00
github-actions[bot]
077f7b2532 1.0.24 2021-11-19 23:46:10 +00:00
github-actions[bot]
a392055010 Update changelog and version after v1.0.23 2021-11-19 23:46:10 +00:00
Henning Makholm
0aea878963 fix changelog 2021-11-19 23:46:10 +00:00
github-actions[bot]
bca71988d3 1.0.23 2021-11-19 23:46:10 +00:00
Edoardo Pirovano
02e1cdcd36 Merge pull request #823 from github/fix-update-workflow
Remove `persist-credentials: false` from workflow
2021-11-19 23:45:25 +00:00
Edoardo Pirovano
4860ed1ad4 Remove persist-credentials: false from workflow 2021-11-19 17:55:10 +00:00
Chris Gavin
3e36cddb07 Merge pull request #781 from github/wait-for-processing
Add an option to allow waiting until an analysis has been processed before finishing the Action.
2021-11-18 12:02:43 +00:00
Chris Gavin
b9bd459b70 Add a clarifying comment to a break. 2021-11-17 15:52:36 +00:00
Chris Gavin
215c4f5ff5 Move the delay to the end of the loop. 2021-11-17 15:51:50 +00:00
Chris Gavin
4eef7ef32c Split out waiting for processing. 2021-11-17 13:20:36 +00:00
Chris Gavin
e0b9b9a248 Check for errors in the analysis status response. 2021-11-17 12:33:42 +00:00
Chris Gavin
823bb21bbb Add a default value for wait-for-processing. 2021-11-17 12:33:38 +00:00
Chris Gavin
49fc4c9b40 Reduce log message level.
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2021-11-17 12:14:48 +00:00
Chris Gavin
21a786fda0 Improve a log message.
Co-authored-by: Andrew Eisenberg <aeisenberg@github.com>
2021-11-17 12:14:25 +00:00
Chris Gavin
316ad9d919 Add an option to allow waiting until an analysis has been processed before finishing the Action. 2021-11-17 12:14:13 +00:00
Henning Makholm
a627e9fa50 Merge pull request #818 from github/update-v1.0.23-2ecc17d7
Merge main into v1
2021-11-16 20:56:34 +01:00
Henning Makholm
160021fe53 fix changelog 2021-11-16 20:39:37 +01:00
github-actions[bot]
3f2269bf58 1.0.23 2021-11-16 18:48:44 +00:00
Edoardo Pirovano
2ecc17d74f Merge pull request #817 from edoardopirovano/respect-ld-preload
Respect value of `LD_PRELOAD` given by the CLI
2021-11-16 00:22:50 +00:00
Edoardo Pirovano
9b506fed7c Respect value of LD_PRELOAD given by the CLI 2021-11-15 22:16:59 +00:00
Henning Makholm
2803f4a792 Merge pull request #816 from github/hmakholm/pr/2.7.1
Update bundle to version 2.7.1
2021-11-15 20:27:55 +01:00
Henning Makholm
720bf9d157 Merge remote-tracking branch 'origin/main' into hmakholm/pr/2.7.1 2021-11-15 20:05:55 +01:00
Andrew Eisenberg
bbf0a22e84 Merge pull request #801 from github/aeisenberg/upload-by-category
Allow multiple uploads in a single job
2021-11-15 10:57:49 -08:00
Andrew Eisenberg
d7b5c618a4 Merge branch 'main' into aeisenberg/upload-by-category 2021-11-15 10:33:14 -08:00
Henning Makholm
37a4db94ad Update bundle to version 2.7.1 2021-11-15 19:32:53 +01:00
Andrew Eisenberg
6a98a4b500 Allow multiple uploads in a single job
They must all have a unique category. The category will be
converted into an environment variable.
2021-11-15 09:16:25 -08:00
32 changed files with 355 additions and 91 deletions

@@ -11,8 +11,6 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Remove PR label
env:

@@ -1,8 +1,13 @@
# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
## 1.0.24 - 23 Nov 2021
No user facing changes.
- Update default CodeQL bundle version to 2.7.2. [#827](https://github.com/github/codeql-action/pull/827)
## 1.0.23 - 16 Nov 2021
- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
- Update default CodeQL bundle version to 2.7.1. [#816](https://github.com/github/codeql-action/pull/816)
## 1.0.22 - 04 Nov 2021

@@ -52,6 +52,10 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
token:
default: ${{ github.token }}
matrix:
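
This hunk declares the new wait-for-processing input on the analyze action. Two details are worth noting: Actions inputs always arrive as strings, so the code in the later hunks compares the value against the literal "true", and the input is marked required: true but given a default of "false", so existing workflows keep the previous behavior without any change. A minimal sketch of that read-and-gate pattern, using @actions/core directly rather than the repository's getRequiredInput wrapper (which appears in the hunks further down):

import * as core from "@actions/core";

// Sketch only: Actions inputs are strings, so the flag is compared against the
// literal "true". The "false" default declared in action.yml means workflows
// that never mention the input keep the old behavior.
function shouldWaitForProcessing(): boolean {
  return core.getInput("wait-for-processing", { required: true }) === "true";
}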

lib/actions-util.js (generated, 1 changed line)

@@ -98,6 +98,7 @@ const getCommitOid = async function (ref = "HEAD") {
}
catch (e) {
core.info(`Failed to call git to get current commit. Continuing with data from environment: ${e}`);
core.info(e.stack || "NO STACK");
return (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
}
};

File diff suppressed because one or more lines are too long

lib/analyze-action.js (generated, 15 changed lines)

@@ -50,7 +50,7 @@ async function sendStatusReport(startedAt, stats, error) {
exports.sendStatusReport = sendStatusReport;
async function run() {
const startedAt = new Date();
let uploadStats = undefined;
let uploadResult = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -105,13 +105,17 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
@@ -156,8 +160,11 @@ async function run() {
}
}
}
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
}
else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });
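
In this hunk the old uploadStats value is replaced by uploadResult, which carries both the status-report fields and the SARIF ID returned by the upload API; the UploadResult interface added in src/upload-lib.ts further down has exactly those two members. A small sketch of the shape and of the merge performed above (field names taken from the diff, optionality assumed to match the later hunk):

// Shape introduced in src/upload-lib.ts (see the diff of that file below).
interface UploadStatusReport {
  raw_upload_size_bytes?: number;
  zipped_upload_size_bytes?: number;
  num_results_in_sarif?: number;
}

interface UploadResult {
  statusReport: UploadStatusReport;
  sarifID: string;
}

// The status report only receives the nested statusReport fields; the sarifID is
// used separately to poll waitForProcessing.
function buildStatusReport(runStats: object, uploadResult: UploadResult) {
  return { ...runStats, ...uploadResult.statusReport };
}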

File diff suppressed because one or more lines are too long

@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20211025"
"bundleVersion": "codeql-bundle-20211122"
}

lib/tracer-config.js (generated, 18 changed lines)

@@ -182,15 +182,15 @@ async function getCombinedTracerConfig(config, codeql) {
tracedLanguageConfigs[language] = await getTracerConfigForLanguage(codeql, config, language);
}
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
}
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(codeQLDir, "tools", "osx64", "libtrace.dylib");
}
else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(codeQLDir, "tools", "linux64", "${LIB}trace.so");
}
}
// On macos it's necessary to prefix the build command with the runner executable
// on order to trace when System Integrity Protection is enabled.

File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated, 98 changed lines)

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -105,6 +105,7 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -243,14 +244,7 @@ exports.buildPayload = buildPayload;
async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, category, analysisName, workflowRunID, sourceRoot, environment, gitHubVersion, apiDetails, logger) {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
@@ -270,12 +264,90 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
}
else if (status === "failed") {
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
}
else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(category) {
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${category ? `_${sanitize(category)}` : ""}`;
if (process.env[sentinelEnvVar]) {
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
"Please specify a unique `category` to call this action multiple times. " +
`Category: ${category ? category : "(none)"}`);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
exports.validateUniqueCategory = validateUniqueCategory;
/**
* Santizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str) {
return str.replace(/[^a-zA-Z0-9_]/g, "_");
}
//# sourceMappingURL=upload-lib.js.map
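
The constants in the waitForProcessing loop above give a five-second poll interval against a two-minute ceiling, and a timeout is deliberately non-fatal: the loop logs a warning and lets the job continue rather than failing it. A quick check of how many status requests that allows at most (ignoring request latency, which only lowers the number):

// Derived from the constants shown in the diff above.
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;    // 5 seconds between polls
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000; // 2 minute overall budget

const maxPolls =
  STATUS_CHECK_TIMEOUT_MILLISECONDS / STATUS_CHECK_FREQUENCY_MILLISECONDS; // 24

console.log(`At most ~${maxPolls} status requests before the loop gives up.`);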

File diff suppressed because one or more lines are too long

lib/upload-lib.test.js (generated, 16 changed lines)

@@ -113,4 +113,20 @@ ava_1.default.beforeEach(() => {
modifiedSarif = uploadLib.populateRunAutomationDetails(sarif, undefined, analysisKey, '{"os": "linux", "language": "javascript"}');
t.deepEqual(modifiedSarif, expectedSarif);
});
(0, ava_1.default)("validateUniqueCategory", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
t.throws(() => uploadLib.validateUniqueCategory(undefined));
t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
t.throws(() => uploadLib.validateUniqueCategory("abc"));
t.notThrows(() => uploadLib.validateUniqueCategory("def"));
t.throws(() => uploadLib.validateUniqueCategory("def"));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
t.throws(() => uploadLib.validateUniqueCategory("abc def"));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
});
//# sourceMappingURL=upload-lib.test.js.map

File diff suppressed because one or more lines are too long

@@ -22,6 +22,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -46,8 +47,11 @@ async function run() {
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
await sendSuccessStatusReport(startedAt, uploadStats);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
}
catch (error) {
const message = error instanceof Error ? error.message : String(error);

@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/util.js (generated, 6 changed lines)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -492,4 +492,8 @@ async function bundleDb(config, language, codeql) {
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long

node_modules/.package-lock.json (generated, vendored, 2 changed lines)

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.23",
"version": "1.0.24",
"lockfileVersion": 2,
"requires": true,
"packages": {

package-lock.json (generated, 4 changed lines)

@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "1.0.23",
"version": "1.0.24",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "1.0.23",
"version": "1.0.24",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^0.5.2",

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.23",
"version": "1.0.24",
"private": true,
"description": "CodeQL action",
"scripts": {

@@ -1,6 +1,6 @@
{
"name": "codeql-runner",
"version": "1.0.23",
"version": "1.0.24",
"lockfileVersion": 1,
"requires": true,
"dependencies": {

@@ -1,6 +1,6 @@
{
"name": "codeql-runner",
"version": "1.0.23",
"version": "1.0.24",
"private": true,
"description": "CodeQL runner",
"scripts": {

@@ -85,6 +85,7 @@ export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
core.info(
`Failed to call git to get current commit. Continuing with data from environment: ${e}`
);
core.info((e as Error).stack || "NO STACK");
return getRequiredEnvParam("GITHUB_SHA");
}
};
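
The extra core.info call logs the stack of whatever was caught when invoking git fails. The (e as Error) cast and the "NO STACK" fallback are needed because a caught value is not guaranteed to be an Error (and, under strict TypeScript settings, the catch binding is typed unknown). A minimal sketch of the same guarded-logging pattern:

import * as core from "@actions/core";

// Thrown values may be strings or other non-Error objects, so guard the .stack
// read and fall back when nothing usable is there.
function logCaught(e: unknown): void {
  core.info(`Caught: ${e}`);
  core.info(e instanceof Error && e.stack ? e.stack : "NO STACK");
}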

@@ -18,7 +18,7 @@ import { uploadDatabases } from "./database-upload";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import { UploadStatusReport } from "./upload-lib";
import { UploadResult } from "./upload-lib";
import * as util from "./util";
import { bundleDb, codeQlVersionAbove, DEBUG_ARTIFACT_NAME } from "./util";
@@ -58,7 +58,7 @@ export async function sendStatusReport(
async function run() {
const startedAt = new Date();
let uploadStats: UploadStatusReport | undefined = undefined;
let uploadResult: UploadResult | undefined = undefined;
let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -163,7 +163,7 @@ async function run() {
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadStats = await upload_lib.uploadFromActions(
uploadResult = await upload_lib.uploadFromActions(
outputDir,
config.gitHubVersion,
apiDetails,
@@ -178,6 +178,18 @@ async function run() {
);
await uploadDatabases(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (
uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await upload_lib.waitForProcessing(
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
apiDetails,
getActionsLogger()
);
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload: string[] = [];
@@ -227,8 +239,11 @@ async function run() {
}
}
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
} else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });
} else {

@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20211025"
"bundleVersion": "codeql-bundle-20211122"
}

@@ -212,25 +212,25 @@ export async function getCombinedTracerConfig(
);
}
mainTracerConfig = concatTracerConfigs(tracedLanguageConfigs, config);
}
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
codeQLDir,
"tools",
"osx64",
"libtrace.dylib"
);
} else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(
codeQLDir,
"tools",
"linux64",
"${LIB}trace.so"
);
// Add a couple more variables
mainTracerConfig.env["ODASA_TRACER_CONFIGURATION"] = mainTracerConfig.spec;
const codeQLDir = path.dirname(codeql.getPath());
if (process.platform === "darwin") {
mainTracerConfig.env["DYLD_INSERT_LIBRARIES"] = path.join(
codeQLDir,
"tools",
"osx64",
"libtrace.dylib"
);
} else if (process.platform !== "win32") {
mainTracerConfig.env["LD_PRELOAD"] = path.join(
codeQLDir,
"tools",
"linux64",
"${LIB}trace.so"
);
}
}
// On macos it's necessary to prefix the build command with the runner executable

@@ -175,3 +175,24 @@ test("populateRunAutomationDetails", (t) => {
);
t.deepEqual(modifiedSarif, expectedSarif);
});
test("validateUniqueCategory", (t) => {
t.notThrows(() => uploadLib.validateUniqueCategory(undefined));
t.throws(() => uploadLib.validateUniqueCategory(undefined));
t.notThrows(() => uploadLib.validateUniqueCategory("abc"));
t.throws(() => uploadLib.validateUniqueCategory("abc"));
t.notThrows(() => uploadLib.validateUniqueCategory("def"));
t.throws(() => uploadLib.validateUniqueCategory("def"));
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() => uploadLib.validateUniqueCategory("abc/def"));
t.throws(() => uploadLib.validateUniqueCategory("abc@def"));
t.throws(() => uploadLib.validateUniqueCategory("abc_def"));
t.throws(() => uploadLib.validateUniqueCategory("abc def"));
// this one is fine
t.notThrows(() => uploadLib.validateUniqueCategory("abc_ def"));
});
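
The clashes asserted in this test follow directly from the sanitize helper added in src/upload-lib.ts (shown further down), which maps every character outside [a-zA-Z0-9_] to an underscore before the category is appended to the sentinel environment variable name. A quick worked check of the categories used above:

// Same regex as the sanitize() helper in the upload-lib.ts diff below.
const sanitize = (str: string) => str.replace(/[^a-zA-Z0-9_]/g, "_");

sanitize("abc/def");  // "abc_def"
sanitize("abc@def");  // "abc_def"  (clashes with the previous one)
sanitize("abc_def");  // "abc_def"  (underscore is already allowed, so it clashes too)
sanitize("abc def");  // "abc_def"  (the space becomes an underscore, another clash)
sanitize("abc_ def"); // "abc__def" (two underscores, so this category stays unique)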

@@ -110,6 +110,8 @@ async function uploadPayload(
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
export interface UploadStatusReport {
@@ -121,6 +123,11 @@ export interface UploadStatusReport {
num_results_in_sarif?: number;
}
export interface UploadResult {
statusReport: UploadStatusReport;
sarifID: string;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
export function findSarifFilesInDir(sarifPath: string): string[] {
@@ -147,7 +154,7 @@ export async function uploadFromActions(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadStatusReport> {
): Promise<UploadResult> {
return await uploadFiles(
getSarifFilePaths(sarifPath),
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
@@ -178,7 +185,7 @@ export async function uploadFromRunner(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadStatusReport> {
): Promise<UploadResult> {
return await uploadFiles(
getSarifFilePaths(sarifPath),
repositoryNwo,
@@ -339,20 +346,11 @@ async function uploadFiles(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadStatusReport> {
): Promise<UploadResult> {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
throw new Error(
"Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job"
);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
validateUniqueCategory(category);
// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
@@ -399,13 +397,114 @@ async function uploadFiles(
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
const sarifID = await uploadPayload(
payload,
repositoryNwo,
apiDetails,
logger
);
logger.endGroup();
return {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
export async function waitForProcessing(
repositoryNwo: RepositoryNwo,
sarifID: string,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<void> {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (
Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning(
"Timed out waiting for analysis to finish processing. Continuing."
);
break;
}
try {
const response = await client.request(
"GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
}
);
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
} else if (status === "failed") {
throw new Error(
`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
);
}
} catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
} else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
export function validateUniqueCategory(category: string | undefined) {
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner
const sentinelEnvVar = `CODEQL_UPLOAD_SARIF${
category ? `_${sanitize(category)}` : ""
}`;
if (process.env[sentinelEnvVar]) {
throw new Error(
"Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per category. " +
"Please specify a unique `category` to call this action multiple times. " +
`Category: ${category ? category : "(none)"}`
);
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
}
}
/**
* Santizes a string to be used as an environment variable name.
* This will replace all non-alphanumeric characters with underscores.
* There could still be some false category clashes if two uploads
* occur that differ only in their non-alphanumeric characters. This is
* unlikely.
*
* @param str the initial value to sanitize
*/
function sanitize(str: string) {
return str.replace(/[^a-zA-Z0-9_]/g, "_");
}
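
Because the sentinel is an environment variable exported with core.exportVariable, a second upload in the same job is only allowed when its category sanitizes to a different variable name, and the check is skipped entirely outside util.isActions(). A minimal usage sketch of validateUniqueCategory as added above:

import * as uploadLib from "./upload-lib";

// First analyze/upload-sarif run in the job: sets the sentinel CODEQL_UPLOAD_SARIF_java.
uploadLib.validateUniqueCategory("java");

// A different category sets its own sentinel and is therefore allowed.
uploadLib.validateUniqueCategory("javascript");

// Reusing a category (or omitting it twice) throws:
// "Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif
//  actions is allowed per job per category. ..."
uploadLib.validateUniqueCategory("java");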

@@ -2,6 +2,7 @@ import * as core from "@actions/core";
import * as actionsUtil from "./actions-util";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import {
getGitHubVersion,
@@ -56,13 +57,21 @@ async function run() {
const gitHubVersion = await getGitHubVersion(apiDetails);
const uploadStats = await upload_lib.uploadFromActions(
const uploadResult = await upload_lib.uploadFromActions(
actionsUtil.getRequiredInput("sarif_file"),
gitHubVersion,
apiDetails,
getActionsLogger()
);
await sendSuccessStatusReport(startedAt, uploadStats);
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing(
parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
apiDetails,
getActionsLogger()
);
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
const stack = error instanceof Error ? error.stack : String(error);

@@ -564,3 +564,7 @@ export async function bundleDb(
}
return databaseBundlePath;
}
export async function delay(milliseconds: number) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
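
The new delay helper is a thin promise wrapper over setTimeout; waitForProcessing uses it to space out its status requests. A hypothetical caller (not from the diff) showing the same pause-and-retry pattern:

async function delay(milliseconds: number) {
  return new Promise((resolve) => setTimeout(resolve, milliseconds));
}

// Hypothetical helper for illustration only: retry an async operation, pausing
// between attempts with the same five-second cadence the upload polling uses.
async function retryWithDelay<T>(operation: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await operation();
    } catch (e) {
      lastError = e;
      if (i < attempts - 1) {
        await delay(5 * 1000);
      }
    }
  }
  throw lastError;
}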

@@ -6,7 +6,7 @@ inputs:
description: |
The SARIF file or directory of SARIF files to be uploaded to GitHub code scanning.
See https://docs.github.com/en/code-security/code-scanning/integrating-with-code-scanning/uploading-a-sarif-file-to-github#uploading-a-code-scanning-analysis-with-github-actions
for information on the maximum number of results and maximum file size supported by code scanning.
for information on the maximum number of results and maximum file size supported by code scanning.
required: false
default: '../results'
checkout_path:
@@ -20,6 +20,10 @@ inputs:
category:
description: String used by Code Scanning for matching the analyses
required: false
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
runs:
using: 'node12'
main: '../lib/upload-sarif-action.js'