Mirror of https://github.com/github/codeql-action.git (synced 2025-12-24 08:10:06 +08:00)
commit node_modules and generated files
lib/upload-lib.js (generated): 135 changed lines
@@ -1,4 +1,7 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 var __importStar = (this && this.__importStar) || function (mod) {
     if (mod && mod.__esModule) return mod;
     var result = {};
@@ -6,11 +9,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     result["default"] = mod;
     return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(require("@actions/core"));
 const file_url_1 = __importDefault(require("file-url"));
 const fs = __importStar(require("fs"));
 const jsonschema = __importStar(require("jsonschema"));
@@ -18,7 +17,6 @@ const path = __importStar(require("path"));
 const zlib_1 = __importDefault(require("zlib"));
 const api = __importStar(require("./api-client"));
 const fingerprints = __importStar(require("./fingerprints"));
-const sharedEnv = __importStar(require("./shared-environment"));
 const util = __importStar(require("./util"));
 // Takes a list of paths to sarif files and combines them together,
 // returning the contents of the combined sarif file.
@@ -43,29 +41,32 @@ function combineSarifFiles(sarifFiles) {
 exports.combineSarifFiles = combineSarifFiles;
 // Upload the given payload.
 // If the request fails then this will retry a small number of times.
-async function uploadPayload(payload) {
-    core.info('Uploading results');
+async function uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger) {
+    logger.info('Uploading results');
     // If in test mode we don't want to upload the results
     const testMode = process.env['TEST_MODE'] === 'true' || false;
     if (testMode) {
         return;
     }
-    const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
     // Make up to 4 attempts to upload, and sleep for these
     // number of seconds between each attempt.
     // We don't want to backoff too much to avoid wasting action
     // minutes, but just waiting a little bit could maybe help.
     const backoffPeriods = [1, 5, 15];
+    const client = api.getApiClient(githubAuth, githubApiUrl);
     for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
-        const response = await api.getApiClient().request("PUT /repos/:owner/:repo/code-scanning/analysis", ({
-            owner: owner,
-            repo: repo,
+        const reqURL = mode === 'actions'
+            ? 'PUT /repos/:owner/:repo/code-scanning/analysis'
+            : 'POST /repos/:owner/:repo/code-scanning/sarifs';
+        const response = await client.request(reqURL, ({
+            owner: repositoryNwo.owner,
+            repo: repositoryNwo.repo,
             data: payload,
         }));
-        core.debug('response status: ' + response.status);
+        logger.debug('response status: ' + response.status);
        const statusCode = response.status;
        if (statusCode === 202) {
-            core.info("Successfully uploaded results");
+            logger.info("Successfully uploaded results");
            return;
        }
        const requestID = response.headers["x-github-request-id"];
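
The rewritten uploadPayload above receives its configuration (repositoryNwo, githubAuth, githubApiUrl, mode, logger) as arguments instead of reading the environment, and picks its API route from the mode flag. A minimal TypeScript sketch of that request construction; RepositoryNwo is assumed here to be a plain owner/repo pair, and "runner" is assumed as the non-actions mode:

// Sketch only; the route strings are the ones used in the diff above.
interface RepositoryNwo {
    owner: string;
    repo: string;
}

type UploadMode = "actions" | "runner"; // "runner" assumed as the non-actions mode

function uploadRequest(mode: UploadMode, nwo: RepositoryNwo, payload: string) {
    // actions mode PUTs a completed analysis; otherwise POST a bare SARIF
    const reqURL = mode === "actions"
        ? "PUT /repos/:owner/:repo/code-scanning/analysis"
        : "POST /repos/:owner/:repo/code-scanning/sarifs";
    return { reqURL, params: { owner: nwo.owner, repo: nwo.repo, data: payload } };
}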
@@ -76,7 +77,7 @@ async function uploadPayload(payload) {
         // On a 5xx status code we may retry the request
         if (attempt < backoffPeriods.length) {
             // Log the failure as a warning but don't mark the action as failed yet
-            core.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
+            logger.warning('Upload attempt (' + (attempt + 1) + ' of ' + (backoffPeriods.length + 1) +
                 ') failed (' + requestID + '). Retrying in ' + backoffPeriods[attempt] +
                 ' seconds: (' + statusCode + ') ' + JSON.stringify(response.data));
             // Sleep for the backoff period
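
The surrounding loop makes up to four attempts (backoffPeriods.length + 1), sleeping 1, 5, then 15 seconds between failures. A self-contained TypeScript sketch of the same fixed-backoff pattern; send is a hypothetical stand-in for client.request, and unlike the code above, which retries on 5xx status codes, this version retries on thrown errors:

// Retry `send` up to backoffPeriods.length + 1 times, sleeping the
// listed number of seconds between failed attempts.
async function withBackoff<T>(
    send: () => Promise<T>,
    backoffPeriods: number[] = [1, 5, 15],
): Promise<T> {
    for (let attempt = 0; attempt <= backoffPeriods.length; attempt++) {
        try {
            return await send();
        } catch (e) {
            if (attempt === backoffPeriods.length) {
                throw e; // out of retries
            }
            // Sleep for the backoff period before the next attempt
            await new Promise((resolve) =>
                setTimeout(resolve, backoffPeriods[attempt] * 1000));
        }
    }
    throw new Error("unreachable");
}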
@@ -97,19 +98,21 @@ async function uploadPayload(payload) {
 // Uploads a single sarif file or a directory of sarif files
 // depending on what the path happens to refer to.
 // Returns true iff the upload occurred and succeeded
-async function upload(input) {
-    if (fs.lstatSync(input).isDirectory()) {
-        const sarifFiles = fs.readdirSync(input)
+async function upload(sarifFile, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
+    const sarifFiles = [];
+    if (fs.lstatSync(sarifFile).isDirectory()) {
+        fs.readdirSync(sarifFile)
             .filter(f => f.endsWith(".sarif"))
-            .map(f => path.resolve(input, f));
+            .map(f => path.resolve(sarifFile, f))
+            .forEach(f => sarifFiles.push(f));
         if (sarifFiles.length === 0) {
-            throw new Error("No SARIF files found to upload in \"" + input + "\".");
+            throw new Error("No SARIF files found to upload in \"" + sarifFile + "\".");
         }
-        return await uploadFiles(sarifFiles);
     }
     else {
-        return await uploadFiles([input]);
+        sarifFiles.push(sarifFile);
     }
+    return await uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger);
 }
 exports.upload = upload;
 // Counts the number of results in the given SARIF file
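
The reworked upload above first normalizes its input into a list of SARIF paths before delegating to uploadFiles. The same collection logic, isolated as a small runnable TypeScript function (the helper name collectSarifFiles is ours, not the action's):

import * as fs from "fs";
import * as path from "path";

// A path may name either a single SARIF file or a directory of *.sarif files.
function collectSarifFiles(sarifPath: string): string[] {
    const sarifFiles: string[] = [];
    if (fs.lstatSync(sarifPath).isDirectory()) {
        fs.readdirSync(sarifPath)
            .filter((f) => f.endsWith(".sarif"))
            .map((f) => path.resolve(sarifPath, f))
            .forEach((f) => sarifFiles.push(f));
        if (sarifFiles.length === 0) {
            throw new Error(`No SARIF files found to upload in "${sarifPath}".`);
        }
    } else {
        sarifFiles.push(sarifPath);
    }
    return sarifFiles;
}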
@@ -123,16 +126,16 @@ function countResultsInSarif(sarif) {
 exports.countResultsInSarif = countResultsInSarif;
 // Validates that the given file path refers to a valid SARIF file.
 // Throws an error if the file is invalid.
-function validateSarifFileSchema(sarifFilePath) {
+function validateSarifFileSchema(sarifFilePath, logger) {
     const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
-    const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
+    const schema = require('../src/sarif_v2.1.0_schema.json');
     const result = new jsonschema.Validator().validate(sarif, schema);
     if (!result.valid) {
         // Output the more verbose error messages in groups as these may be very large.
         for (const error of result.errors) {
-            core.startGroup("Error details: " + error.stack);
-            core.info(JSON.stringify(error, null, 2));
-            core.endGroup();
+            logger.startGroup("Error details: " + error.stack);
+            logger.info(JSON.stringify(error, null, 2));
+            logger.endGroup();
         }
         // Set the main error message to the stacks of all the errors.
         // This should be of a manageable size and may even give enough to fix the error.
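
validateSarifFileSchema parses the file, checks it against the bundled SARIF 2.1.0 schema with the jsonschema package, and surfaces every violation before failing. A compact TypeScript sketch of that flow, throwing one combined error rather than logging groups, and taking the schema path as a parameter instead of requiring it:

import * as fs from "fs";
import { Validator } from "jsonschema";

// Parse a SARIF file and validate it against a JSON schema on disk.
function validateSarif(sarifFilePath: string, schemaPath: string): void {
    const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
    const schema = JSON.parse(fs.readFileSync(schemaPath, "utf8"));
    const result = new Validator().validate(sarif, schema);
    if (!result.valid) {
        // Collect every schema violation, as the diff does with logger groups
        const stacks = result.errors.map((e) => e.stack);
        throw new Error("Unable to upload invalid SARIF file:\n" + stacks.join("\n"));
    }
}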
@@ -143,65 +146,49 @@ function validateSarifFileSchema(sarifFilePath) {
 exports.validateSarifFileSchema = validateSarifFileSchema;
 // Uploads the given set of sarif files.
 // Returns true iff the upload occurred and succeeded
-async function uploadFiles(sarifFiles) {
-    core.startGroup("Uploading results");
-    core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
-    const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
-    if (process.env[sentinelEnvVar]) {
-        throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
-    }
-    core.exportVariable(sentinelEnvVar, sentinelEnvVar);
+async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKey, analysisName, workflowRunID, checkoutPath, environment, githubAuth, githubApiUrl, mode, logger) {
+    logger.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
     // Validate that the files we were asked to upload are all valid SARIF files
     for (const file of sarifFiles) {
-        validateSarifFileSchema(file);
+        validateSarifFileSchema(file, logger);
     }
-    const commitOid = await util.getCommitOid();
-    const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
-    const ref = util.getRef();
-    const analysisKey = await util.getAnalysisKey();
-    const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
-    const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
     let sarifPayload = combineSarifFiles(sarifFiles);
     sarifPayload = fingerprints.addFingerprints(sarifPayload);
     const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
-    let checkoutPath = core.getInput('checkout_path');
     let checkoutURI = file_url_1.default(checkoutPath);
-    const workflowRunID = parseInt(workflowRunIDStr, 10);
-    if (Number.isNaN(workflowRunID)) {
-        throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
-    }
-    let matrix = core.getInput('matrix');
-    if (matrix === "null" || matrix === "") {
-        matrix = undefined;
-    }
     const toolNames = util.getToolNames(sarifPayload);
-    const payload = JSON.stringify({
-        "commit_oid": commitOid,
-        "ref": ref,
-        "analysis_key": analysisKey,
-        "analysis_name": analysisName,
-        "sarif": zipped_sarif,
-        "workflow_run_id": workflowRunID,
-        "checkout_uri": checkoutURI,
-        "environment": matrix,
-        "started_at": startedAt,
-        "tool_names": toolNames,
-    });
-    // Log some useful debug info about the info
-    const rawUploadSizeBytes = sarifPayload.length;
-    core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
-    const zippedUploadSizeBytes = zipped_sarif.length;
-    core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
-    const numResultInSarif = countResultsInSarif(sarifPayload);
-    core.debug("Number of results in upload: " + numResultInSarif);
-    if (!util.isLocalRun()) {
-        // Make the upload
-        await uploadPayload(payload);
+    let payload;
+    if (mode === 'actions') {
+        payload = JSON.stringify({
+            "commit_oid": commitOid,
+            "ref": ref,
+            "analysis_key": analysisKey,
+            "analysis_name": analysisName,
+            "sarif": zipped_sarif,
+            "workflow_run_id": workflowRunID,
+            "checkout_uri": checkoutURI,
+            "environment": environment,
+            "tool_names": toolNames,
+        });
     }
     else {
-        core.debug("Not uploading because this is a local run.");
+        payload = JSON.stringify({
+            "commit_sha": commitOid,
+            "ref": ref,
+            "sarif": zipped_sarif,
+            "checkout_uri": checkoutURI,
+            "tool_name": toolNames[0],
+        });
     }
-    core.endGroup();
+    // Log some useful debug info about the info
+    const rawUploadSizeBytes = sarifPayload.length;
+    console.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
+    const zippedUploadSizeBytes = zipped_sarif.length;
+    console.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    console.debug("Number of results in upload: " + numResultInSarif);
+    // Make the upload
+    await uploadPayload(payload, repositoryNwo, githubAuth, githubApiUrl, mode, logger);
     return {
         raw_upload_size_bytes: rawUploadSizeBytes,
         zipped_upload_size_bytes: zippedUploadSizeBytes,
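
Before the upload, the combined SARIF string is gzipped and base64-encoded, and the raw size, compressed size, and result count are logged for debugging. A minimal TypeScript sketch of that compression and size accounting, assuming the SARIF files have already been combined into a single string:

import * as zlib from "zlib";

// Gzip the combined SARIF and base64-encode it for the upload payload.
function compressSarif(sarifPayload: string): string {
    return zlib.gzipSync(sarifPayload).toString("base64");
}

// Rough size accounting, mirroring the debug lines in the diff.
function logUploadSizes(sarifPayload: string): void {
    const zipped = compressSarif(sarifPayload);
    console.debug(`Raw upload size: ${sarifPayload.length} bytes`);
    console.debug(`Base64 zipped upload size: ${zipped.length} bytes`);
}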