Mirror of https://github.com/github/codeql-action.git

Commit: Merge branch 'master' into analysisName
@@ -15,7 +15,8 @@ async function run() {
   // We want pick the dominant language in the repo from the ones we're able to build
   // The languages are sorted in order specified by user or by lines of code if we got
   // them from the GitHub API, so try to build the first language on the list.
-  const language = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]?.split(',')[0];
+  const autobuildLanguages = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]?.split(',') || [];
+  const language = autobuildLanguages[0];

   if (!language) {
     core.info("None of the languages in this project require extra build steps");
@@ -24,6 +25,10 @@ async function run() {

   core.debug(`Detected dominant traced language: ${language}`);

+  if (autobuildLanguages.length > 1) {
+    core.warning(`We will only automatically build ${language} code. If you wish to scan ${autobuildLanguages.slice(1).join(' and ')}, you must replace this block with custom build steps.`);
+  }
+
   core.startGroup(`Attempting to automatically build ${language} code`);
   // TODO: share config accross actions better via env variables
   const codeqlCmd = util.getRequiredEnvParam(sharedEnv.CODEQL_ACTION_CMD);
@@ -44,7 +49,7 @@ async function run() {
     core.endGroup();

   } catch (error) {
-    core.setFailed(error.message);
+    core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
     await util.reportActionFailed('autobuild', error.message, error.stack);
     return;
   }
@@ -53,6 +58,6 @@ async function run() {
 }

 run().catch(e => {
-  core.setFailed("autobuild action failed: " + e);
+  core.setFailed("autobuild action failed. " + e);
   console.log(e);
 });
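A minimal standalone sketch of the language-selection logic these autobuild hunks introduce, assuming the same comma-separated CODEQL_ACTION_TRACED_LANGUAGES environment variable; the helper name pickAutobuildLanguage is hypothetical and not part of the diff.

// Hypothetical helper mirroring the selection above: the first traced language
// is built automatically, and the rest are surfaced so the user knows they
// need custom build steps for them.
function pickAutobuildLanguage(): { language?: string, skipped: string[] } {
  const traced = process.env['CODEQL_ACTION_TRACED_LANGUAGES'];
  const autobuildLanguages = traced?.split(',') || [];
  return {
    language: autobuildLanguages[0],
    skipped: autobuildLanguages.slice(1),
  };
}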
@@ -3,15 +3,19 @@ import * as path from "path";

 import * as configUtils from "./config-utils";
 import * as externalQueries from "./external-queries";
+import * as util from "./util";

 test("checkoutExternalQueries", async () => {
   let config = new configUtils.Config();
   config.externalQueries = [
     new configUtils.ExternalQuery("github/codeql-go", "df4c6869212341b601005567381944ed90906b6b"),
   ];
-  await externalQueries.checkoutExternalQueries(config);
-
-  let destination = process.env["RUNNER_WORKSPACE"] || "/tmp/codeql-action/";
-  // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
-  expect(fs.existsSync(path.join(destination, "github", "codeql-go", "COPYRIGHT"))).toBeTruthy();
+  await util.withTmpDir(async tmpDir => {
+    process.env["RUNNER_WORKSPACE"] = tmpDir;
+    await externalQueries.checkoutExternalQueries(config);
+
+    // COPYRIGHT file existed in df4c6869212341b601005567381944ed90906b6b but not in master
+    expect(fs.existsSync(path.join(tmpDir, "github", "codeql-go", "COPYRIGHT"))).toBeTruthy();
+  });
 });
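The rewritten test routes RUNNER_WORKSPACE into a throwaway directory so nothing leaks between test runs or onto the real runner workspace. A compact sketch of the same isolation pattern, built on the withTmpDir helper added to src/util.ts further down; saving and restoring the previous value is an extra precaution of this sketch, not something the diff does.

// Sketch: point an env-var-driven working directory at a fresh temp dir for
// the duration of one async body, then restore whatever value was there before.
import * as util from './util';

async function withRedirectedWorkspace(body: (dir: string) => Promise<void>) {
  const previous = process.env['RUNNER_WORKSPACE'];
  await util.withTmpDir(async tmpDir => {
    process.env['RUNNER_WORKSPACE'] = tmpDir;
    try {
      await body(tmpDir);
    } finally {
      if (previous === undefined) {
        delete process.env['RUNNER_WORKSPACE'];
      } else {
        process.env['RUNNER_WORKSPACE'] = previous;
      }
    }
  });
}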
@@ -4,9 +4,10 @@ import * as fs from 'fs';
 import * as path from 'path';

 import * as configUtils from './config-utils';
+import * as util from './util';

 export async function checkoutExternalQueries(config: configUtils.Config) {
-  const folder = process.env['RUNNER_WORKSPACE'] || '/tmp/codeql-action';
+  const folder = util.getRequiredEnvParam('RUNNER_WORKSPACE');

   for (const externalQuery of config.externalQueries) {
     core.info('Checking out ' + externalQuery.repository);
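The change above drops the silent fallback to /tmp in favour of util.getRequiredEnvParam, which turns a missing RUNNER_WORKSPACE into a hard error instead of a quietly different checkout location. The implementation of getRequiredEnvParam is not part of this diff; a plausible shape, for reference only:

// Plausible shape of getRequiredEnvParam (assumed, not shown in this diff):
// return the value of the named environment variable, or throw so the action
// fails fast rather than continuing with a guessed default.
export function getRequiredEnvParam(paramName: string): string {
  const value = process.env[paramName];
  if (value === undefined || value.length === 0) {
    throw new Error(paramName + ' environment variable must be set');
  }
  return value;
}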
@@ -150,7 +150,10 @@ async function run() {
     await runQueries(codeqlCmd, databaseFolder, sarifFolder, config);

     if ('true' === core.getInput('upload')) {
-      await upload_lib.upload(sarifFolder);
+      if (!await upload_lib.upload(sarifFolder)) {
+        await util.reportActionFailed('failed', 'upload');
+        return;
+      }
     }

   } catch (error) {
@@ -47,22 +47,86 @@ export function combineSarifFiles(sarifFiles: string[]): string {
   return JSON.stringify(combinedSarif);
 }

+// Upload the given payload.
+// If the request fails then this will retry a small number of times.
+async function uploadPayload(payload): Promise<boolean> {
+  core.info('Uploading results');
+
+  const githubToken = core.getInput('token');
+  const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
+  const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
+  const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
+
+  // Make up to 4 attempts to upload, and sleep for these
+  // number of seconds between each attempt.
+  // We don't want to backoff too much to avoid wasting action
+  // minutes, but just waiting a little bit could maybe help.
+  const backoffPeriods = [1, 5, 15];
+
+  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
+
+    const res: http.HttpClientResponse = await client.put(url, payload);
+    core.debug('response status: ' + res.message.statusCode);
+
+    const statusCode = res.message.statusCode;
+    if (statusCode === 202) {
+      core.info("Successfully uploaded results");
+      return true;
+    }
+
+    const requestID = res.message.headers["x-github-request-id"];
+
+    // On any other status code that's not 5xx mark the upload as failed
+    if (!statusCode || statusCode < 500 || statusCode >= 600) {
+      core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
+      return false;
+    }
+
+    // On a 5xx status code we may retry the request
+    if (attempt < backoffPeriods.length) {
+      // Log the failure as a warning but don't mark the action as failed yet
+      core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
+        ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
+        ' seconds: (' + statusCode + ') ' + await res.readBody());
+      // Sleep for the backoff period
+      await new Promise(r => setTimeout(r, backoffPeriods[attempt] * 1000));
+      continue;
+
+    } else {
+      // If the upload fails with 5xx then we assume it is a temporary problem
+      // and not an error that the user has caused or can fix.
+      // We avoid marking the job as failed to avoid breaking CI workflows.
+      core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + await res.readBody());
+      return false;
+    }
+  }
+
+  return false;
+}
+
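The new uploadPayload inlines a simple capped-retry loop: up to four attempts, sleeping 1, 5 and 15 seconds between them, and retrying only on 5xx responses. A generic sketch of the same policy, factored out as a helper; this helper is illustrative only and does not exist in the codebase.

// A generic version of the retry policy above (illustrative only): run an
// async attempt up to backoffPeriods.length + 1 times, sleeping the given
// number of seconds between attempts, and stop as soon as one attempt
// reports success.
async function withBackoff<T>(
  backoffPeriods: number[],
  attemptOnce: (attempt: number) => Promise<{ done: true, value: T } | { done: false }>
): Promise<T | undefined> {
  for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
    const result = await attemptOnce(attempt);
    if (result.done) {
      return result.value;
    }
    if (attempt < backoffPeriods.length) {
      // Matches the [1, 5, 15] second schedule used by uploadPayload above.
      await new Promise(resolve => setTimeout(resolve, backoffPeriods[attempt] * 1000));
    }
  }
  return undefined;
}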
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
-export async function upload(input: string) {
+// Returns true iff the upload occurred and succeeded
+export async function upload(input: string): Promise<boolean> {
   if (fs.lstatSync(input).isDirectory()) {
     const sarifFiles = fs.readdirSync(input)
       .filter(f => f.endsWith(".sarif"))
       .map(f => path.resolve(input, f));
-    await uploadFiles(sarifFiles);
+    if (sarifFiles.length === 0) {
+      core.setFailed("No SARIF files found to upload in \"" + input + "\".");
+      return false;
+    }
+    return await uploadFiles(sarifFiles);
   } else {
-    await uploadFiles([input]);
+    return await uploadFiles([input]);
   }
 }

 // Uploads the given set of sarif files.
-async function uploadFiles(sarifFiles: string[]) {
+// Returns true iff the upload occurred and succeeded
+async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
   core.startGroup("Uploading results");
+  let succeeded = false;
   try {
     // Check if an upload has happened before. If so then abort.
     // This is intended to catch when the finish and upload-sarif actions
@@ -70,7 +134,7 @@ async function uploadFiles(sarifFiles: string[]) {
     const sentinelFile = await getSentinelFilePath();
     if (fs.existsSync(sentinelFile)) {
       core.info("Aborting as an upload has already happened from this job");
-      return;
+      return false;
     }

     const commitOid = util.getRequiredEnvParam('GITHUB_SHA');
@@ -80,7 +144,7 @@ async function uploadFiles(sarifFiles: string[]) {
     const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
     const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];

-    core.debug("Uploading sarif files: " + JSON.stringify(sarifFiles));
+    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
     let sarifPayload = combineSarifFiles(sarifFiles);
     sarifPayload = fingerprints.addFingerprints(sarifPayload);

@@ -91,7 +155,7 @@ async function uploadFiles(sarifFiles: string[]) {

     if (Number.isNaN(workflowRunID)) {
       core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
-      return;
+      return false;
     }

     let matrix: string | undefined = core.getInput('matrix');
@@ -114,25 +178,8 @@ async function uploadFiles(sarifFiles: string[]) {
       "tool_names": toolNames,
     });

-    core.info('Uploading results');
-    const githubToken = core.getInput('token');
-    const ph: auth.BearerCredentialHandler = new auth.BearerCredentialHandler(githubToken);
-    const client = new http.HttpClient('Code Scanning : Upload SARIF', [ph]);
-    const url = 'https://api.github.com/repos/' + process.env['GITHUB_REPOSITORY'] + '/code-scanning/analysis';
-    const res: http.HttpClientResponse = await client.put(url, payload);
-    const requestID = res.message.headers["x-github-request-id"];
-
-    core.debug('response status: ' + res.message.statusCode);
-    if (res.message.statusCode === 500) {
-      // If the upload fails with 500 then we assume it is a temporary problem
-      // with turbo-scan and not an error that the user has caused or can fix.
-      // We avoid marking the job as failed to avoid breaking CI workflows.
-      core.error('Upload failed (' + requestID + '): ' + await res.readBody());
-    } else if (res.message.statusCode !== 202) {
-      core.setFailed('Upload failed (' + requestID + '): ' + await res.readBody());
-    } else {
-      core.info("Successfully uploaded results");
-    }
+    // Make the upload
+    succeeded = await uploadPayload(payload);

     // Mark that we have made an upload
     fs.writeFileSync(sentinelFile, '');
@@ -141,4 +188,6 @@ async function uploadFiles(sarifFiles: string[]) {
     core.setFailed(error.message);
   }
   core.endGroup();
+
+  return succeeded;
 }
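uploadFiles now reports success through its return value, and the sentinel file referenced in the hunks above is what keeps a single job from uploading twice (for example when both the finish and upload-sarif steps run). getSentinelFilePath is defined elsewhere in upload-lib.ts and not shown here; a minimal sketch of the guard, with an assumed file location:

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Minimal sketch of the "upload once per job" guard. The real sentinel path
// comes from getSentinelFilePath (not shown in this diff); this temp-dir
// location is an assumption for illustration only.
const sentinelFile = path.join(os.tmpdir(), 'codeql-upload-sentinel');

function hasAlreadyUploaded(): boolean {
  return fs.existsSync(sentinelFile);
}

function markUploadDone() {
  fs.writeFileSync(sentinelFile, '');
}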
@@ -9,17 +9,19 @@ async function run() {
   }

   try {
-    await upload_lib.upload(core.getInput('sarif_file'));
+    if (await upload_lib.upload(core.getInput('sarif_file'))) {
+      await util.reportActionSucceeded('upload-sarif');
+    } else {
+      await util.reportActionFailed('upload-sarif', 'upload');
+    }
   } catch (error) {
     core.setFailed(error.message);
     await util.reportActionFailed('upload-sarif', error.message, error.stack);
     return;
   }
-
-  await util.reportActionSucceeded('upload-sarif');
 }

 run().catch(e => {
-  core.setFailed("upload-sarif action failed: " + e);
+  core.setFailed("codeql/upload-sarif action failed: " + e);
   console.log(e);
 });
src/util.ts (10 changed lines)
@@ -3,6 +3,8 @@ import * as http from '@actions/http-client';
 import * as auth from '@actions/http-client/auth';
 import * as octokit from '@octokit/rest';
 import consoleLogLevel from 'console-log-level';
+import * as fs from "fs";
+import * as os from 'os';
 import * as path from 'path';

 import * as sharedEnv from './shared-environment';
@@ -361,3 +363,11 @@ export function getToolNames(sarifContents: string): string[] {

   return Object.keys(toolNames);
 }
+
+// Creates a random temporary directory, runs the given body, and then deletes the directory.
+// Mostly intended for use within tests.
+export async function withTmpDir(body: (tmpDir: string) => Promise<void>) {
+  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-action-'));
+  await body(tmpDir);
+  fs.rmdirSync(tmpDir, { recursive: true });
+}
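A short usage sketch for the new withTmpDir helper, written from a hypothetical consumer module; the scratch file staged here is purely illustrative.

import * as fs from 'fs';
import * as path from 'path';
import { withTmpDir } from './util';

// Stage a scratch file inside the temporary directory; withTmpDir deletes the
// whole directory once the async body resolves.
async function example() {
  await withTmpDir(async tmpDir => {
    const scratch = path.join(tmpDir, 'scratch.txt');
    fs.writeFileSync(scratch, 'hello');
    console.log(fs.readFileSync(scratch, 'utf8'));
  });
}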