Compare commits

..

3 Commits

Author  SHA1  Message  Date
Henning Makholm  a627e9fa50  Merge pull request #818 from github/update-v1.0.23-2ecc17d7 (Merge main into v1)  2021-11-16 20:56:34 +01:00
Henning Makholm  160021fe53  fix changelog  2021-11-16 20:39:37 +01:00
github-actions[bot]  3f2269bf58  1.0.23  2021-11-16 18:48:44 +00:00
21 changed files with 39 additions and 224 deletions


@@ -11,6 +11,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Remove PR label
env:


@@ -1,9 +1,5 @@
# CodeQL Action and CodeQL Runner Changelog
## [UNRELEASED]
No user facing changes.
## 1.0.23 - 16 Nov 2021
- The `upload-sarif` action now allows multiple uploads in a single job, as long as they have different categories. [#801](https://github.com/github/codeql-action/pull/801)
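
As an illustration of the behaviour described in that changelog entry, here is a minimal workflow sketch with two `upload-sarif` steps in a single job. The job name, SARIF file names, and category values are hypothetical and not taken from this diff:

jobs:
  upload-sarif:
    runs-on: ubuntu-latest
    permissions:
      security-events: write       # required for uploading code scanning results
    steps:
      - name: Upload results from tool A
        uses: github/codeql-action/upload-sarif@v1
        with:
          sarif_file: tool-a.sarif   # hypothetical SARIF file produced earlier in the job
          category: tool-a
      - name: Upload results from tool B
        uses: github/codeql-action/upload-sarif@v1
        with:
          sarif_file: tool-b.sarif   # hypothetical SARIF file produced earlier in the job
          category: tool-b

Because the two steps use different `category` values, the second upload passes the per-category uniqueness check (`validateUniqueCategory`, visible in the lib/upload-lib.js hunk further down) rather than failing as a duplicate upload.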


@@ -52,10 +52,6 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
token:
default: ${{ github.token }}
matrix:

lib/analyze-action.js (generated), 15 changed lines

@@ -50,7 +50,7 @@ async function sendStatusReport(startedAt, stats, error) {
exports.sendStatusReport = sendStatusReport;
async function run() {
const startedAt = new Date();
let uploadResult = undefined;
let uploadStats = undefined;
let runStats = undefined;
let config = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -105,17 +105,13 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
uploadStats = await upload_lib.uploadFromActions(outputDir, config.gitHubVersion, apiDetails, logger);
}
else {
logger.info("Not uploading results");
}
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
await (0, database_upload_1.uploadDatabases)(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload = [];
@@ -160,11 +156,8 @@ async function run() {
}
}
}
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
}
else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });

File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated), 64 changed lines

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.validateUniqueCategory = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const zlib_1 = __importDefault(require("zlib"));
@@ -105,7 +105,6 @@ async function uploadPayload(payload, repositoryNwo, apiDetails, logger) {
});
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
@@ -264,67 +263,14 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
const numResultInSarif = countResultsInSarif(sarifPayload);
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
const sarifID = await uploadPayload(payload, repositoryNwo, apiDetails, logger);
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
async function waitForProcessing(repositoryNwo, sarifID, apiDetails, logger) {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
break;
}
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
});
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
}
else if (status === "failed") {
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
}
}
catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
}
else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
exports.waitForProcessing = waitForProcessing;
function validateUniqueCategory(category) {
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner

File diff suppressed because one or more lines are too long


@@ -22,7 +22,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const actionsUtil = __importStar(require("./actions-util"));
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -47,11 +46,8 @@ async function run() {
url: (0, util_1.getRequiredEnvParam)("GITHUB_SERVER_URL"),
};
const gitHubVersion = await (0, util_1.getGitHubVersion)(apiDetails);
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing((0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), uploadResult.sarifID, apiDetails, (0, logging_1.getActionsLogger)());
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
const uploadStats = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), gitHubVersion, apiDetails, (0, logging_1.getActionsLogger)());
await sendSuccessStatusReport(startedAt, uploadStats);
}
catch (error) {
const message = error instanceof Error ? error.message : String(error);


@@ -1 +1 @@
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YAClE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACjD,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,UAAU,GAAG;YACjB,IAAI,EAAE,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC;YAC3C,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;SAC9C,CAAC;QAEF,MAAM,aAAa,GAAG,MAAM,IAAA,uBAAgB,EAAC,UAAU,CAAC,CAAC;QAEzD,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACpD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,aAAa,EACb,UAAU,EACV,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KACvD;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,SAAS,EACT,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/util.js (generated), 6 changed lines

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.bundleDb = exports.codeQlVersionAbove = exports.isHTTPError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -492,8 +492,4 @@ async function bundleDb(config, language, codeql) {
return databaseBundlePath;
}
exports.bundleDb = bundleDb;
async function delay(milliseconds) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}
exports.delay = delay;
//# sourceMappingURL=util.js.map

File diff suppressed because one or more lines are too long

node_modules/.package-lock.json (generated, vendored), 2 changed lines

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.24",
"version": "1.0.23",
"lockfileVersion": 2,
"requires": true,
"packages": {

package-lock.json (generated), 4 changed lines

@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "1.0.24",
"version": "1.0.23",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "1.0.24",
"version": "1.0.23",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^0.5.2",


@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "1.0.24",
"version": "1.0.23",
"private": true,
"description": "CodeQL action",
"scripts": {


@@ -1,6 +1,6 @@
{
"name": "codeql-runner",
"version": "1.0.24",
"version": "1.0.23",
"lockfileVersion": 1,
"requires": true,
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "codeql-runner",
"version": "1.0.24",
"version": "1.0.23",
"private": true,
"description": "CodeQL runner",
"scripts": {


@@ -18,7 +18,7 @@ import { uploadDatabases } from "./database-upload";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import { UploadStatusReport } from "./upload-lib";
import * as util from "./util";
import { bundleDb, codeQlVersionAbove, DEBUG_ARTIFACT_NAME } from "./util";
@@ -58,7 +58,7 @@ export async function sendStatusReport(
async function run() {
const startedAt = new Date();
let uploadResult: UploadResult | undefined = undefined;
let uploadStats: UploadStatusReport | undefined = undefined;
let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined;
util.initializeEnvironment(util.Mode.actions, pkg.version);
@@ -163,7 +163,7 @@ async function run() {
core.setOutput("db-locations", dbLocations);
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
uploadResult = await upload_lib.uploadFromActions(
uploadStats = await upload_lib.uploadFromActions(
outputDir,
config.gitHubVersion,
apiDetails,
@@ -178,18 +178,6 @@ async function run() {
);
await uploadDatabases(repositoryNwo, config, apiDetails, logger); // Possibly upload the database bundles for remote queries
if (
uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await upload_lib.waitForProcessing(
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
apiDetails,
getActionsLogger()
);
}
if (config.debugMode) {
// Upload the database bundles as an Actions artifact for debugging
const toUpload: string[] = [];
@@ -239,11 +227,8 @@ async function run() {
}
}
if (runStats && uploadResult) {
await sendStatusReport(startedAt, {
...runStats,
...uploadResult.statusReport,
});
if (runStats && uploadStats) {
await sendStatusReport(startedAt, { ...runStats, ...uploadStats });
} else if (runStats) {
await sendStatusReport(startedAt, { ...runStats });
} else {


@@ -110,8 +110,6 @@ async function uploadPayload(
logger.debug(`response status: ${response.status}`);
logger.info("Successfully uploaded results");
return response.data.id;
}
export interface UploadStatusReport {
@@ -123,11 +121,6 @@ export interface UploadStatusReport {
num_results_in_sarif?: number;
}
export interface UploadResult {
statusReport: UploadStatusReport;
sarifID: string;
}
// Recursively walks a directory and returns all SARIF files it finds.
// Does not follow symlinks.
export function findSarifFilesInDir(sarifPath: string): string[] {
@@ -154,7 +147,7 @@ export async function uploadFromActions(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadResult> {
): Promise<UploadStatusReport> {
return await uploadFiles(
getSarifFilePaths(sarifPath),
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
@@ -185,7 +178,7 @@ export async function uploadFromRunner(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadResult> {
): Promise<UploadStatusReport> {
return await uploadFiles(
getSarifFilePaths(sarifPath),
repositoryNwo,
@@ -346,7 +339,7 @@ async function uploadFiles(
gitHubVersion: util.GitHubVersion,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<UploadResult> {
): Promise<UploadStatusReport> {
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);
@@ -397,88 +390,17 @@ async function uploadFiles(
logger.debug(`Number of results in upload: ${numResultInSarif}`);
// Make the upload
const sarifID = await uploadPayload(
payload,
repositoryNwo,
apiDetails,
logger
);
await uploadPayload(payload, repositoryNwo, apiDetails, logger);
logger.endGroup();
return {
statusReport: {
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
},
sarifID,
raw_upload_size_bytes: rawUploadSizeBytes,
zipped_upload_size_bytes: zippedUploadSizeBytes,
num_results_in_sarif: numResultInSarif,
};
}
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
export async function waitForProcessing(
repositoryNwo: RepositoryNwo,
sarifID: string,
apiDetails: api.GitHubApiDetails,
logger: Logger
): Promise<void> {
logger.startGroup("Waiting for processing to finish");
const client = api.getApiClient(apiDetails);
const statusCheckingStarted = Date.now();
// eslint-disable-next-line no-constant-condition
while (true) {
if (
Date.now() >
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
) {
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
logger.warning(
"Timed out waiting for analysis to finish processing. Continuing."
);
break;
}
try {
const response = await client.request(
"GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
sarif_id: sarifID,
}
);
const status = response.data.processing_status;
logger.info(`Analysis upload status is ${status}.`);
if (status === "complete") {
break;
} else if (status === "failed") {
throw new Error(
`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
);
}
} catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
case 404:
logger.debug("Analysis is not found yet...");
break; // Note this breaks from the case statement, not the outer loop.
default:
throw e;
}
} else {
throw e;
}
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
}
logger.endGroup();
}
export function validateUniqueCategory(category: string | undefined) {
if (util.isActions()) {
// This check only works on actions as env vars don't persist between calls to the runner


@@ -2,7 +2,6 @@ import * as core from "@actions/core";
import * as actionsUtil from "./actions-util";
import { getActionsLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import * as upload_lib from "./upload-lib";
import {
getGitHubVersion,
@@ -57,21 +56,13 @@ async function run() {
const gitHubVersion = await getGitHubVersion(apiDetails);
const uploadResult = await upload_lib.uploadFromActions(
const uploadStats = await upload_lib.uploadFromActions(
actionsUtil.getRequiredInput("sarif_file"),
gitHubVersion,
apiDetails,
getActionsLogger()
);
if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
await upload_lib.waitForProcessing(
parseRepositoryNwo(getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
apiDetails,
getActionsLogger()
);
}
await sendSuccessStatusReport(startedAt, uploadResult.statusReport);
await sendSuccessStatusReport(startedAt, uploadStats);
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
const stack = error instanceof Error ? error.stack : String(error);


@@ -564,7 +564,3 @@ export async function bundleDb(
}
return databaseBundlePath;
}
export async function delay(milliseconds: number) {
return new Promise((resolve) => setTimeout(resolve, milliseconds));
}


@@ -6,7 +6,7 @@ inputs:
description: |
The SARIF file or directory of SARIF files to be uploaded to GitHub code scanning.
See https://docs.github.com/en/code-security/code-scanning/integrating-with-code-scanning/uploading-a-sarif-file-to-github#uploading-a-code-scanning-analysis-with-github-actions
for information on the maximum number of results and maximum file size supported by code scanning.
for information on the maximum number of results and maximum file size supported by code scanning.
required: false
default: '../results'
checkout_path:
@@ -20,10 +20,6 @@ inputs:
category:
description: String used by Code Scanning for matching the analyses
required: false
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
default: "false"
runs:
using: 'node12'
main: '../lib/upload-sarif-action.js'
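
For reference, the `wait-for-processing` input deleted in the hunk above would be used roughly as in the sketch below; the step name and SARIF path are hypothetical:

steps:
  - name: Upload SARIF and wait for Code Scanning to process it
    uses: github/codeql-action/upload-sarif@v1
    with:
      sarif_file: results.sarif      # hypothetical path to a previously generated SARIF file
      wait-for-processing: "true"    # overrides the "false" default shown above

With the input set to "true", upload-sarif-action.ts calls upload_lib.waitForProcessing, which polls GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id every 5 seconds and, if processing has not finished after 2 minutes, logs a warning and continues, as shown in the src/upload-lib.ts hunk earlier in this comparison.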