Mirror of https://github.com/github/codeql-action.git, synced 2026-01-04 21:50:17 +08:00

Compare commits: 4 commits, default-se... → redsun82/d...
| Author | SHA1 | Date |
|---|---|---|
| | 9fbfe02d3e | |
| | 53b268a8f0 | |
| | 33a31c1c92 | |
| | a7fb336064 | |
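Across the four generated bundles and the TypeScript sources below, the change threads a new `uploadKind` argument through the SARIF upload path: the unconditional `uploadFiles`/`uploadSpecifiedFiles` entry points delegate to new `maybeUploadFiles`/`maybeUploadSpecifiedFiles` functions that upload only when the kind is `"always"`, while still processing (and dumping) the SARIF when `CODEQL_ACTION_SARIF_DUMP_DIR` is set. A minimal TypeScript sketch of that gating, using simplified stand-in types rather than the action's real definitions:

```typescript
// Minimal sketch of the gating these diffs introduce. The types here are
// simplified stand-ins; the real UploadKind, AnalysisConfig, and UploadResult
// are defined in the action's sources.
type UploadKind = "always" | "failure-only" | "never";

interface AnalysisConfig {
  name: string;
}

interface UploadResult {
  sarifID: string;
}

async function maybeUpload(
  uploadKind: UploadKind,
  uploadTarget: AnalysisConfig,
  processAndUpload: (upload: boolean) => Promise<UploadResult | undefined>,
): Promise<UploadResult | undefined> {
  // CODEQL_ACTION_SARIF_DUMP_DIR forces SARIF processing even when the
  // results will not be uploaded.
  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR"];
  const upload = uploadKind === "always";
  if (!upload && !dumpDir) {
    console.log(`Skipping upload of ${uploadTarget.name} results`);
    return undefined;
  }
  // Process unconditionally from here; the callback performs the actual
  // upload only when `upload` is true.
  return processAndUpload(upload);
}
```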
lib/analyze-action.js (generated): 199 lines changed
```diff
@@ -95581,98 +95581,113 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   }
   return payloadObj;
 }
-async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
   }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs18.existsSync(outputDir)) {
@@ -96033,21 +96048,26 @@ async function run() {
   }
   core14.setOutput("db-locations", dbLocations);
   core14.setOutput("sarif-output", import_path4.default.resolve(outputDir));
-  const uploadInput = getOptionalInput("upload");
-  if (runStats && getUploadValue(uploadInput) === "always") {
+  const uploadInput = getUploadValue(
+    getOptionalInput("upload")
+  );
+  if (runStats) {
     if (isCodeScanningEnabled(config)) {
-      uploadResult = await uploadFiles(
+      uploadResult = await maybeUploadFiles(
         outputDir,
         getRequiredInput("checkout_path"),
         getOptionalInput("category"),
         features,
         logger,
-        CodeScanning
+        CodeScanning,
+        uploadInput
       );
-      core14.setOutput("sarif-id", uploadResult.sarifID);
+      if (uploadResult) {
+        core14.setOutput("sarif-id", uploadResult.sarifID);
+      }
     }
     if (isCodeQualityEnabled(config)) {
-      const qualityUploadResult = await uploadFiles(
+      const qualityUploadResult = await maybeUploadFiles(
         outputDir,
         getRequiredInput("checkout_path"),
         fixCodeQualityCategory(
@@ -96056,12 +96076,15 @@ async function run() {
         ),
         features,
         logger,
-        CodeQuality
+        CodeQuality,
+        uploadInput
       );
-      core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      if (qualityUploadResult) {
+        core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      }
     }
   } else {
-    logger.info("Not uploading results");
+    logger.info("No query status report, skipping upload");
   }
   await uploadOverlayBaseDatabaseToCache(codeql, config, logger);
   await uploadDatabases(repositoryNwo, codeql, config, apiDetails, logger);
```
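The call sites in `run()` now resolve the `upload` input to an upload kind once, pass it down, and guard the `sarif-id` outputs because the result may now be `undefined`. A hypothetical sketch of the input-to-kind mapping, reusing the `UploadKind` union from the sketch above (the action's real `getUploadValue` lives in its `actions-util` module and may differ in detail):

```typescript
// Hypothetical sketch of resolving the "upload" action input to an
// UploadKind, as the new `getUploadValue(getOptionalInput("upload"))`
// call sites do; the accepted input strings here are assumptions.
function getUploadValue(input: string | undefined): UploadKind {
  switch (input) {
    case undefined:
    case "true":
    case "always":
      return "always";
    case "false":
    case "failure-only":
      return "failure-only";
    case "never":
      return "never";
    default:
      // Assumed fallback: keep uploading for unrecognized values.
      return "always";
  }
}
```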
lib/init-action-post.js (generated): 182 lines changed
```diff
@@ -133019,97 +133019,123 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
   }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs17.existsSync(outputDir)) {
```
lib/upload-lib.js (generated): 191 lines changed
```diff
@@ -84782,6 +84782,7 @@ __export(upload_lib_exports, {
   buildPayload: () => buildPayload,
   findSarifFilesInDir: () => findSarifFilesInDir,
   getSarifFilePaths: () => getSarifFilePaths,
+  maybeUploadFiles: () => maybeUploadFiles,
   populateRunAutomationDetails: () => populateRunAutomationDetails,
   readSarifFile: () => readSarifFile,
   shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
@@ -92391,97 +92392,134 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
 async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
+  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs13.existsSync(outputDir)) {
@@ -92655,6 +92693,7 @@ function filterAlertsByDiffRange(logger, sarif) {
   buildPayload,
   findSarifFilesInDir,
   getSarifFilePaths,
+  maybeUploadFiles,
   populateRunAutomationDetails,
   readSarifFile,
   shouldConsiderConfigurationError,
```
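`upload-lib` keeps `uploadFiles` and `uploadSpecifiedFiles` as thin wrappers that delegate with `"always"`, so existing callers keep a guaranteed result while new call sites opt into gating. A simplified sketch of that wrapper pattern, using the stand-in types from the sketch above (the real functions take several more parameters):

```typescript
// Sketch of the backwards-compatible wrapper pattern: the existing entry
// point keeps its non-optional result type by delegating with "always" and
// casting away the undefined case, which only arises for other upload kinds.
async function maybeUploadFilesSketch(
  target: AnalysisConfig,
  uploadKind: UploadKind,
): Promise<UploadResult | undefined> {
  if (uploadKind !== "always") {
    return undefined; // gated off
  }
  return { sarifID: `uploaded-${target.name}` }; // placeholder result
}

async function uploadFilesSketch(target: AnalysisConfig): Promise<UploadResult> {
  return maybeUploadFilesSketch(target, "always") as Promise<UploadResult>;
}
```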
lib/upload-sarif-action.js (generated): 189 lines changed
```diff
@@ -93092,97 +93092,134 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
 async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
+  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs14.existsSync(outputDir)) {
```
```diff
@@ -330,22 +330,27 @@ async function run() {
   }
   core.setOutput("db-locations", dbLocations);
   core.setOutput("sarif-output", path.resolve(outputDir));
-  const uploadInput = actionsUtil.getOptionalInput("upload");
-  if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
+  const uploadInput = actionsUtil.getUploadValue(
+    actionsUtil.getOptionalInput("upload"),
+  );
+  if (runStats) {
     if (isCodeScanningEnabled(config)) {
-      uploadResult = await uploadLib.uploadFiles(
+      uploadResult = await uploadLib.maybeUploadFiles(
         outputDir,
         actionsUtil.getRequiredInput("checkout_path"),
         actionsUtil.getOptionalInput("category"),
         features,
         logger,
         analyses.CodeScanning,
+        uploadInput,
       );
-      core.setOutput("sarif-id", uploadResult.sarifID);
+      if (uploadResult) {
+        core.setOutput("sarif-id", uploadResult.sarifID);
+      }
     }

     if (isCodeQualityEnabled(config)) {
-      const qualityUploadResult = await uploadLib.uploadFiles(
+      const qualityUploadResult = await uploadLib.maybeUploadFiles(
         outputDir,
         actionsUtil.getRequiredInput("checkout_path"),
         actionsUtil.fixCodeQualityCategory(
@@ -355,11 +360,14 @@ async function run() {
         ),
         features,
         logger,
         analyses.CodeQuality,
+        uploadInput,
       );
-      core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      if (qualityUploadResult) {
+        core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      }
     }
   } else {
-    logger.info("Not uploading results");
+    logger.info("No query status report, skipping upload");
   }

   // Possibly upload the overlay-base database to actions cache.
```
```diff
@@ -623,18 +623,44 @@ export async function uploadFiles(
   logger: Logger,
   uploadTarget: analyses.AnalysisConfig,
 ): Promise<UploadResult> {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always",
+  ) as Promise<UploadResult>;
+}
+
+/**
+ * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
+ * to. It will only upload if `uploadKind === "always"`, and return `undefined` otherwise. However
+ * if `CODEQL_ACTION_SARIF_DUMP_DIR` is set, it will unconditionally process the input sarif files.
+ */
+export async function maybeUploadFiles(
+  inputSarifPath: string,
+  checkoutPath: string,
+  category: string | undefined,
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+  uploadKind: actionsUtil.UploadKind,
+): Promise<UploadResult | undefined> {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate,
   );

-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
     uploadTarget,
+    uploadKind,
   );
 }

@@ -649,103 +675,137 @@ export async function uploadSpecifiedFiles(
   logger: Logger,
   uploadTarget: analyses.AnalysisConfig,
 ): Promise<UploadResult> {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-
-  const gitHubVersion = await getGitHubVersion();
-
-  let sarif: SarifFile;
-
-  if (sarifPaths.length > 1) {
-    // Validate that the files we were asked to upload are all valid SARIF files
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger,
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-
-    // Validate that there are no runs for the same category
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
-
-  const analysisKey = await api.getAnalysisKey();
-  const environment = actionsUtil.getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment,
-  );
-
-  const toolNames = util.getToolNames(sarif);
-
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-
-  const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-
-  const payload = buildPayload(
-    await gitUtils.getCommitOid(checkoutPath),
-    await gitUtils.getRef(),
-    analysisKey,
-    util.getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    actionsUtil.getWorkflowRunID(),
-    actionsUtil.getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await gitUtils.determineBaseBranchHeadCommitOid(),
-  );
-
-  // Log some useful debug info about the info
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-
-  // Make the upload
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target,
-  );
-
-  logger.endGroup();
-
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif,
-    },
-    sarifID,
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always",
+  ) as Promise<UploadResult>;
+}
+
+async function maybeUploadSpecifiedFiles(
+  sarifPaths: string[],
+  checkoutPath: string,
+  category: string | undefined,
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+  uploadKind: actionsUtil.UploadKind,
+): Promise<UploadResult | undefined> {
+  const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return undefined;
+  }
+
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+
+    const gitHubVersion = await getGitHubVersion();
+
+    let sarif: SarifFile;
+
+    if (sarifPaths.length > 1) {
+      // Validate that the files we were asked to upload are all valid SARIF files
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger,
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+
+      // Validate that there are no runs for the same category
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
+
+    const analysisKey = await api.getAnalysisKey();
+    const environment = actionsUtil.getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment,
+    );
+
+    const toolNames = util.getToolNames(sarif);
+
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`,
+      );
+      return undefined;
+    }
+
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+
+    const payload = buildPayload(
+      await gitUtils.getCommitOid(checkoutPath),
+      await gitUtils.getRef(),
+      analysisKey,
+      util.getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      actionsUtil.getWorkflowRunID(),
+      actionsUtil.getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await gitUtils.determineBaseBranchHeadCommitOid(),
+    );
+
+    // Log some useful debug info about the info
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+
+    // Make the upload
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target,
+    );
+
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif,
+      },
+      sarifID,
+    };
+  } finally {
+    logger.endGroup();
+  }
 }

 /**
```
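Note the shape of the refactored body: the log group opens only after the early-exit check and is closed in a `finally` block, so both the early `return undefined` taken when `uploadKind` is not `"always"` and any thrown error still close the group. A generic sketch of that pattern:

```typescript
// Generic form of the try/finally structure adopted by
// maybeUploadSpecifiedFiles: `finally` guarantees the group is closed on
// normal return, early return, and thrown errors alike.
function withLogGroup<T>(name: string, body: () => T): T {
  console.log(`::group::${name}`);
  try {
    return body();
  } finally {
    console.log("::endgroup::");
  }
}
```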