Mirror of https://github.com/github/codeql-action.git, synced 2025-12-24 16:20:09 +08:00

Compare commits: default-se ... redsun82/d (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 9fbfe02d3e | |
| | 53b268a8f0 | |
| | 33a31c1c92 | |
| | a7fb336064 | |
lib/analyze-action.js (generated, 199 changed lines)
@@ -95581,98 +95581,113 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   }
   return payloadObj;
 }
-async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
   }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs18.existsSync(outputDir)) {
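The heart of the change is the gate at the top of the new `maybeUploadSpecifiedFiles`: results are processed whenever they will be uploaded or when a SARIF dump directory is configured, but the actual upload happens only for the `"always"` kind. A minimal sketch of that decision logic, assuming the `UploadKind` union used elsewhere in the action:

```typescript
// Sketch of the gating introduced in maybeUploadSpecifiedFiles.
// Assumption: UploadKind is the "always" | "failure-only" | "never" union
// produced by getUploadValue; only "always" triggers a real upload.
type UploadKind = "always" | "failure-only" | "never";

function shouldProcess(uploadKind: UploadKind, dumpDir: string | undefined): boolean {
  // Skip all work only when there is neither an upload nor a dump to do.
  return uploadKind === "always" || Boolean(dumpDir);
}

function shouldUpload(uploadKind: UploadKind): boolean {
  // A dump-only run still validates, combines, and serializes the SARIF,
  // but returns before compressing and uploading the payload.
  return uploadKind === "always";
}
```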
@@ -96033,21 +96048,26 @@ async function run() {
   }
   core14.setOutput("db-locations", dbLocations);
   core14.setOutput("sarif-output", import_path4.default.resolve(outputDir));
-  const uploadInput = getOptionalInput("upload");
-  if (runStats && getUploadValue(uploadInput) === "always") {
+  const uploadInput = getUploadValue(
+    getOptionalInput("upload")
+  );
+  if (runStats) {
     if (isCodeScanningEnabled(config)) {
-      uploadResult = await uploadFiles(
+      uploadResult = await maybeUploadFiles(
         outputDir,
         getRequiredInput("checkout_path"),
         getOptionalInput("category"),
         features,
         logger,
-        CodeScanning
+        CodeScanning,
+        uploadInput
       );
-      core14.setOutput("sarif-id", uploadResult.sarifID);
+      if (uploadResult) {
+        core14.setOutput("sarif-id", uploadResult.sarifID);
+      }
     }
     if (isCodeQualityEnabled(config)) {
-      const qualityUploadResult = await uploadFiles(
+      const qualityUploadResult = await maybeUploadFiles(
         outputDir,
         getRequiredInput("checkout_path"),
         fixCodeQualityCategory(
@@ -96056,12 +96076,15 @@ async function run() {
         ),
         features,
         logger,
-        CodeQuality
+        CodeQuality,
+        uploadInput
       );
-      core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      if (qualityUploadResult) {
+        core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      }
     }
   } else {
-    logger.info("Not uploading results");
+    logger.info("No query status report, skipping upload");
   }
   await uploadOverlayBaseDatabaseToCache(codeql, config, logger);
   await uploadDatabases(repositoryNwo, codeql, config, apiDetails, logger);
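On the caller side in `run()`, the raw `upload` input is now normalized once through `getUploadValue` and threaded into `maybeUploadFiles`, and the `sarif-id` outputs are set only when an upload result actually exists. The diff does not show `getUploadValue` itself; the sketch below is an assumed mapping based on the action's documented `upload` input values, not code from this commit:

```typescript
// Hedged sketch of getUploadValue; the exact mapping is an assumption.
type UploadKind = "always" | "failure-only" | "never";

function getUploadValue(input: string | undefined): UploadKind {
  switch (input) {
    case undefined:
    case "true":
    case "always":
      return "always";
    case "false":
    case "never":
      return "never";
    case "failure-only":
      return "failure-only";
    default:
      // The real helper may warn and fall back instead of throwing.
      throw new Error(`Unrecognized "upload" input: ${input}`);
  }
}
```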
lib/init-action-post.js (generated, 182 changed lines)
@@ -133019,97 +133019,123 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
   }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
 }
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs17.existsSync(outputDir)) {
lib/upload-lib.js (generated, 191 changed lines)
@@ -84782,6 +84782,7 @@ __export(upload_lib_exports, {
   buildPayload: () => buildPayload,
   findSarifFilesInDir: () => findSarifFilesInDir,
   getSarifFilePaths: () => getSarifFilePaths,
+  maybeUploadFiles: () => maybeUploadFiles,
   populateRunAutomationDetails: () => populateRunAutomationDetails,
   readSarifFile: () => readSarifFile,
   shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
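`__export(upload_lib_exports, { ... })` is esbuild's bundling helper, so exposing the new `maybeUploadFiles` from the bundled module is just one more getter entry. Roughly, the helper looks like this (a paraphrase of esbuild's generated preamble, not code from this diff):

```typescript
// Approximate shape of esbuild's __export helper: each named export is
// installed as an enumerable getter, giving live bindings on the namespace.
function __export(target: object, all: Record<string, () => unknown>): void {
  for (const name in all) {
    Object.defineProperty(target, name, { get: all[name], enumerable: true });
  }
}
```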
@@ -92391,97 +92392,134 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
 async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
 }
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
+  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
+}
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs13.existsSync(outputDir)) {
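Both legacy entry points survive in the bundle as thin wrappers that pin `uploadKind` to `"always"`, which is why existing callers keep a non-optional `UploadResult` (the TypeScript source further down narrows with an `as Promise<UploadResult>` cast). A self-contained model of the pattern:

```typescript
// Minimal model of the wrapper pattern: the legacy API delegates to the new
// optional-returning API with uploadKind pinned to "always", then narrows.
type UploadKind = "always" | "failure-only" | "never";
interface UploadResult { sarifID: string; }

async function maybeUpload(kind: UploadKind): Promise<UploadResult | undefined> {
  if (kind !== "always") return undefined; // skip path
  return { sarifID: "example-id" };        // upload path
}

async function upload(): Promise<UploadResult> {
  // Safe narrowing: with "always" the undefined branch is unreachable.
  return (await maybeUpload("always")) as UploadResult;
}
```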
@@ -92655,6 +92693,7 @@ function filterAlertsByDiffRange(logger, sarif) {
   buildPayload,
   findSarifFilesInDir,
   getSarifFilePaths,
+  maybeUploadFiles,
   populateRunAutomationDetails,
   readSarifFile,
   shouldConsiderConfigurationError,
lib/upload-sarif-action.js (generated, 189 changed lines)
@@ -93092,97 +93092,134 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
+}
+async function maybeUploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate
   );
-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
-    uploadTarget
+    uploadTarget,
+    uploadKind
   );
 }
 async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-  const gitHubVersion = await getGitHubVersion();
-  let sarif;
-  if (sarifPaths.length > 1) {
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await addFingerprints(sarif, checkoutPath, logger);
-  const analysisKey = await getAnalysisKey();
-  const environment = getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment
-  );
-  const toolNames = getToolNames(sarif);
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-  const payload = buildPayload(
-    await getCommitOid(checkoutPath),
-    await getRef(),
-    analysisKey,
-    getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    getWorkflowRunID(),
-    getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await determineBaseBranchHeadCommitOid()
-  );
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target
-  );
-  logger.endGroup();
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif
-    },
-    sarifID
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always"
+  );
 }
+async function maybeUploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget, uploadKind) {
+  const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return void 0;
+  }
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+    const gitHubVersion = await getGitHubVersion();
+    let sarif;
+    if (sarifPaths.length > 1) {
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await addFingerprints(sarif, checkoutPath, logger);
+    const analysisKey = await getAnalysisKey();
+    const environment = getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment
+    );
+    const toolNames = getToolNames(sarif);
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`
+      );
+      return void 0;
+    }
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+    const payload = buildPayload(
+      await getCommitOid(checkoutPath),
+      await getRef(),
+      analysisKey,
+      getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      getWorkflowRunID(),
+      getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await determineBaseBranchHeadCommitOid()
+    );
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target
+    );
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif
+      },
+      sarifID
+    };
+  } finally {
+    logger.endGroup();
+  }
+}
 function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
   if (!fs14.existsSync(outputDir)) {
src/analyze-action.ts

@@ -330,22 +330,27 @@ async function run() {
   }
   core.setOutput("db-locations", dbLocations);
   core.setOutput("sarif-output", path.resolve(outputDir));
-  const uploadInput = actionsUtil.getOptionalInput("upload");
-  if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
+  const uploadInput = actionsUtil.getUploadValue(
+    actionsUtil.getOptionalInput("upload"),
+  );
+  if (runStats) {
     if (isCodeScanningEnabled(config)) {
-      uploadResult = await uploadLib.uploadFiles(
+      uploadResult = await uploadLib.maybeUploadFiles(
         outputDir,
         actionsUtil.getRequiredInput("checkout_path"),
         actionsUtil.getOptionalInput("category"),
         features,
         logger,
         analyses.CodeScanning,
+        uploadInput,
       );
-      core.setOutput("sarif-id", uploadResult.sarifID);
+      if (uploadResult) {
+        core.setOutput("sarif-id", uploadResult.sarifID);
+      }
     }

     if (isCodeQualityEnabled(config)) {
-      const qualityUploadResult = await uploadLib.uploadFiles(
+      const qualityUploadResult = await uploadLib.maybeUploadFiles(
         outputDir,
         actionsUtil.getRequiredInput("checkout_path"),
         actionsUtil.fixCodeQualityCategory(
@@ -355,11 +360,14 @@ async function run() {
         ),
         features,
         logger,
         analyses.CodeQuality,
+        uploadInput,
       );
-      core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      if (qualityUploadResult) {
+        core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
+      }
     }
   } else {
-    logger.info("Not uploading results");
+    logger.info("No query status report, skipping upload");
   }

   // Possibly upload the overlay-base database to actions cache.
src/upload-lib.ts

@@ -623,18 +623,44 @@ export async function uploadFiles(
   logger: Logger,
   uploadTarget: analyses.AnalysisConfig,
 ): Promise<UploadResult> {
+  return maybeUploadFiles(
+    inputSarifPath,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always",
+  ) as Promise<UploadResult>;
+}
+
+/**
+ * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
+ * to. It will only upload if `uploadKind === "always"`, and return `undefined` otherwise. However
+ * if `CODEQL_ACTION_SARIF_DUMP_DIR` is set, it will unconditionally process the input sarif files.
+ */
+export async function maybeUploadFiles(
+  inputSarifPath: string,
+  checkoutPath: string,
+  category: string | undefined,
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+  uploadKind: actionsUtil.UploadKind,
+): Promise<UploadResult | undefined> {
   const sarifPaths = getSarifFilePaths(
     inputSarifPath,
     uploadTarget.sarifPredicate,
   );

-  return uploadSpecifiedFiles(
+  return maybeUploadSpecifiedFiles(
     sarifPaths,
     checkoutPath,
     category,
     features,
     logger,
     uploadTarget,
+    uploadKind,
   );
 }
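The doc comment fixes the contract: a result comes back only for `uploadKind === "always"`; any other kind yields `undefined`, even when `CODEQL_ACTION_SARIF_DUMP_DIR` forces a dump-only processing pass. A hedged usage sketch; `features`, `logger`, and `CodeScanning` stand in for values the action already has in scope:

```typescript
// Illustrative call against the documented contract (names are placeholders).
const result = await maybeUploadFiles(
  "results/",        // single SARIF file or a directory of SARIF files
  process.cwd(),     // checkout path
  undefined,         // category
  features,
  logger,
  CodeScanning,      // an analyses.AnalysisConfig
  "failure-only",    // any kind other than "always"...
);
// ...yields undefined: with a dump dir set, the files are still processed and
// dumped, but no upload happens and no SARIF ID is returned.
```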
@@ -649,103 +675,137 @@ export async function uploadSpecifiedFiles(
   logger: Logger,
   uploadTarget: analyses.AnalysisConfig,
 ): Promise<UploadResult> {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
-
-  const gitHubVersion = await getGitHubVersion();
-
-  let sarif: SarifFile;
-
-  if (sarifPaths.length > 1) {
-    // Validate that the files we were asked to upload are all valid SARIF files
-    for (const sarifPath of sarifPaths) {
-      const parsedSarif = readSarifFile(sarifPath);
-      validateSarifFileSchema(parsedSarif, sarifPath, logger);
-    }
-
-    sarif = await combineSarifFilesUsingCLI(
-      sarifPaths,
-      gitHubVersion,
-      features,
-      logger,
-    );
-  } else {
-    const sarifPath = sarifPaths[0];
-    sarif = readSarifFile(sarifPath);
-    validateSarifFileSchema(sarif, sarifPath, logger);
-
-    // Validate that there are no runs for the same category
-    await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
-  }
-
-  sarif = filterAlertsByDiffRange(logger, sarif);
-  sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
-
-  const analysisKey = await api.getAnalysisKey();
-  const environment = actionsUtil.getRequiredInput("matrix");
-  sarif = populateRunAutomationDetails(
-    sarif,
-    category,
-    analysisKey,
-    environment,
-  );
-
-  const toolNames = util.getToolNames(sarif);
-
-  logger.debug(`Validating that each SARIF run has a unique category`);
-  validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
-  logger.debug(`Serializing SARIF for upload`);
-  const sarifPayload = JSON.stringify(sarif);
-
-  const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
-  if (dumpDir) {
-    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-  }
-
-  logger.debug(`Compressing serialized SARIF`);
-  const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
-  const checkoutURI = url.pathToFileURL(checkoutPath).href;
-
-  const payload = buildPayload(
-    await gitUtils.getCommitOid(checkoutPath),
-    await gitUtils.getRef(),
-    analysisKey,
-    util.getRequiredEnvParam("GITHUB_WORKFLOW"),
-    zippedSarif,
-    actionsUtil.getWorkflowRunID(),
-    actionsUtil.getWorkflowRunAttempt(),
-    checkoutURI,
-    environment,
-    toolNames,
-    await gitUtils.determineBaseBranchHeadCommitOid(),
-  );
-
-  // Log some useful debug info about the info
-  const rawUploadSizeBytes = sarifPayload.length;
-  logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
-  const zippedUploadSizeBytes = zippedSarif.length;
-  logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
-  const numResultInSarif = countResultsInSarif(sarifPayload);
-  logger.debug(`Number of results in upload: ${numResultInSarif}`);
-
-  // Make the upload
-  const sarifID = await uploadPayload(
-    payload,
-    getRepositoryNwo(),
-    logger,
-    uploadTarget.target,
-  );
-
-  logger.endGroup();
-
-  return {
-    statusReport: {
-      raw_upload_size_bytes: rawUploadSizeBytes,
-      zipped_upload_size_bytes: zippedUploadSizeBytes,
-      num_results_in_sarif: numResultInSarif,
-    },
-    sarifID,
-  };
+  return maybeUploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+    "always",
+  ) as Promise<UploadResult>;
+}
+
+async function maybeUploadSpecifiedFiles(
+  sarifPaths: string[],
+  checkoutPath: string,
+  category: string | undefined,
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+  uploadKind: actionsUtil.UploadKind,
+): Promise<UploadResult | undefined> {
+  const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
+  const upload = uploadKind === "always";
+  if (!upload && !dumpDir) {
+    logger.info(`Skipping upload of ${uploadTarget.name} results`);
+    return undefined;
+  }
+
+  logger.startGroup(`Processing ${uploadTarget.name} results`);
+  try {
+    logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+
+    const gitHubVersion = await getGitHubVersion();
+
+    let sarif: SarifFile;
+
+    if (sarifPaths.length > 1) {
+      // Validate that the files we were asked to upload are all valid SARIF files
+      for (const sarifPath of sarifPaths) {
+        const parsedSarif = readSarifFile(sarifPath);
+        validateSarifFileSchema(parsedSarif, sarifPath, logger);
+      }
+
+      sarif = await combineSarifFilesUsingCLI(
+        sarifPaths,
+        gitHubVersion,
+        features,
+        logger,
+      );
+    } else {
+      const sarifPath = sarifPaths[0];
+      sarif = readSarifFile(sarifPath);
+      validateSarifFileSchema(sarif, sarifPath, logger);
+
+      // Validate that there are no runs for the same category
+      await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
+    }
+
+    sarif = filterAlertsByDiffRange(logger, sarif);
+    sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
+
+    const analysisKey = await api.getAnalysisKey();
+    const environment = actionsUtil.getRequiredInput("matrix");
+    sarif = populateRunAutomationDetails(
+      sarif,
+      category,
+      analysisKey,
+      environment,
+    );
+
+    const toolNames = util.getToolNames(sarif);
+
+    logger.debug(`Validating that each SARIF run has a unique category`);
+    validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
+    logger.debug(`Serializing SARIF for upload`);
+    const sarifPayload = JSON.stringify(sarif);
+
+    if (dumpDir) {
+      dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+    }
+
+    if (!upload) {
+      logger.info(
+        `Skipping upload of ${uploadTarget.name} results because upload kind is "${uploadKind}"`,
+      );
+      return undefined;
+    }
+
+    logger.debug(`Compressing serialized SARIF`);
+    const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
+    const checkoutURI = url.pathToFileURL(checkoutPath).href;
+
+    const payload = buildPayload(
+      await gitUtils.getCommitOid(checkoutPath),
+      await gitUtils.getRef(),
+      analysisKey,
+      util.getRequiredEnvParam("GITHUB_WORKFLOW"),
+      zippedSarif,
+      actionsUtil.getWorkflowRunID(),
+      actionsUtil.getWorkflowRunAttempt(),
+      checkoutURI,
+      environment,
+      toolNames,
+      await gitUtils.determineBaseBranchHeadCommitOid(),
+    );
+
+    // Log some useful debug info about the info
+    const rawUploadSizeBytes = sarifPayload.length;
+    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
+    const zippedUploadSizeBytes = zippedSarif.length;
+    logger.debug(`Base64 zipped upload size: ${zippedUploadSizeBytes} bytes`);
+    const numResultInSarif = countResultsInSarif(sarifPayload);
+    logger.debug(`Number of results in upload: ${numResultInSarif}`);
+
+    // Make the upload
+    const sarifID = await uploadPayload(
+      payload,
+      getRepositoryNwo(),
+      logger,
+      uploadTarget.target,
+    );
+
+    return {
+      statusReport: {
+        raw_upload_size_bytes: rawUploadSizeBytes,
+        zipped_upload_size_bytes: zippedUploadSizeBytes,
+        num_results_in_sarif: numResultInSarif,
+      },
+      sarifID,
+    };
+  } finally {
+    logger.endGroup();
+  }
 }

 /**