Consistently use "post-processing"

.github/workflows/__quality-queries.yml (generated, vendored, 8 changes)
@@ -80,7 +80,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
           upload-database: false
-          processed-sarif-path: ${{ runner.temp }}/processed
+          post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/upload-artifact@v4
@@ -97,12 +97,12 @@ jobs:
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
-      - name: Upload processed SARIF
+      - name: Upload post-processed SARIF
         uses: actions/upload-artifact@v4
         with:
           name: |
-            processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-          path: ${{ runner.temp }}/processed
+            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+          path: ${{ runner.temp }}/post-processed
           retention-days: 7
           if-no-files-found: error
       - name: Check quality query does not appear in security SARIF
@@ -70,10 +70,10 @@ inputs:
     description: Whether to upload the resulting CodeQL database
     required: false
     default: "true"
-  processed-sarif-path:
+  post-processed-sarif-path:
     description: >-
       Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
-      on them. Ordinarily, these processed SARIF files are not saved to disk. However, if a path is provided as an
+      on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
       argument for this input, they are written to the specified directory.
     required: false
   wait-for-processing:
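
Note: the description above matches the fallback in writePostProcessedFiles further down in this diff: an explicit post-processed-sarif-path input wins, otherwise the CODEQL_ACTION_SARIF_DUMP_DIR environment variable is consulted, and if neither is set the post-processed SARIF is not written to disk. A minimal TypeScript sketch of that resolution (the helper name is hypothetical; the env var literal is taken from the generated bundles in this diff):

    // Hypothetical helper mirroring the fallback in writePostProcessedFiles:
    // explicit input > CODEQL_ACTION_SARIF_DUMP_DIR > do not write anything.
    function resolvePostProcessedSarifDir(
      pathInput: string | undefined,
    ): string | undefined {
      // The TypeScript source spells this EnvVar.SARIF_DUMP_DIR; the generated
      // bundles inline the literal "CODEQL_ACTION_SARIF_DUMP_DIR".
      return pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR"];
    }

    // Example: resolvePostProcessedSarifDir(getOptionalInput("post-processed-sarif-path"))
    // returns undefined when neither the input nor the env var is set.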

lib/analyze-action.js (generated, 36 changes)
@@ -95901,7 +95901,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
   const gitHubVersion = await getGitHubVersion();
   let sarif;
   category = analysis.fixCategory(logger, category);
@@ -95934,17 +95934,17 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
   );
   return { sarif, analysisKey, environment };
 }
-async function writeProcessedFiles(logger, pathInput, uploadTarget, processingResults) {
+async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
   const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
   if (outputPath !== void 0) {
     dumpSarifFile(
-      JSON.stringify(processingResults.sarif),
+      JSON.stringify(postProcessingResults.sarif),
       outputPath,
       logger,
       uploadTarget
     );
   } else {
-    logger.debug(`Not writing processed SARIF files.`);
+    logger.debug(`Not writing post-processed SARIF files.`);
   }
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
@@ -95970,16 +95970,16 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
     category,
     uploadTarget
   );
-  return uploadProcessedFiles(
+  return uploadPostProcessedFiles(
     logger,
     checkoutPath,
     uploadTarget,
     processingResults
   );
 }
-async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processingResults) {
+async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
   logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  const sarif = processingResults.sarif;
+  const sarif = postProcessingResults.sarif;
   const toolNames = getToolNames(sarif);
   logger.debug(`Validating that each SARIF run has a unique category`);
   validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
@@ -95991,13 +95991,13 @@ async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processi
   const payload = buildPayload(
     await getCommitOid(checkoutPath),
     await getRef(),
-    processingResults.analysisKey,
+    postProcessingResults.analysisKey,
     getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     getWorkflowRunID(),
     getWorkflowRunAttempt(),
     checkoutURI,
-    processingResults.environment,
+    postProcessingResults.environment,
     toolNames,
     await determineBaseBranchHeadCommitOid()
   );
@@ -96191,7 +96191,7 @@ function filterAlertsByDiffRange(logger, sarif) {
 }
 
 // src/upload-sarif.ts
-async function processAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, processedOutputPath) {
+async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) {
   const sarifGroups = await getGroupedSarifFilePaths(
     logger,
     sarifPath
@@ -96201,7 +96201,7 @@ async function processAndUploadSarif(logger, features, uploadKind, checkoutPath,
     sarifGroups
   )) {
     const analysisConfig = getAnalysisConfig(analysisKind);
-    const processingResults = await postProcessSarifFiles(
+    const postProcessingResults = await postProcessSarifFiles(
       logger,
       features,
       checkoutPath,
@@ -96209,18 +96209,18 @@ async function processAndUploadSarif(logger, features, uploadKind, checkoutPath,
       category,
       analysisConfig
     );
-    await writeProcessedFiles(
+    await writePostProcessedFiles(
       logger,
-      processedOutputPath,
+      postProcessedOutputPath,
       analysisConfig,
-      processingResults
+      postProcessingResults
     );
     if (uploadKind === "always") {
-      uploadResults[analysisKind] = await uploadProcessedFiles(
+      uploadResults[analysisKind] = await uploadPostProcessedFiles(
         logger,
         checkoutPath,
         analysisConfig,
-        processingResults
+        postProcessingResults
       );
     }
   }
@@ -96428,14 +96428,14 @@ async function run() {
   const checkoutPath = getRequiredInput("checkout_path");
   const category = getOptionalInput("category");
   if (await features.getValue("analyze_use_new_upload" /* AnalyzeUseNewUpload */)) {
-    uploadResults = await processAndUploadSarif(
+    uploadResults = await postProcessAndUploadSarif(
       logger,
       features,
       uploadKind,
       checkoutPath,
       outputDir,
       category,
-      getOptionalInput("processed-sarif-path")
+      getOptionalInput("post-processed-sarif-path")
     );
   } else if (uploadKind === "always") {
     uploadResults = {};

lib/init-action-post.js (generated, 12 changes)
@@ -133312,7 +133312,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
   const gitHubVersion = await getGitHubVersion();
   let sarif;
   category = analysis.fixCategory(logger, category);
@@ -133368,16 +133368,16 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
     category,
     uploadTarget
   );
-  return uploadProcessedFiles(
+  return uploadPostProcessedFiles(
     logger,
     checkoutPath,
     uploadTarget,
     processingResults
   );
 }
-async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processingResults) {
+async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
   logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  const sarif = processingResults.sarif;
+  const sarif = postProcessingResults.sarif;
   const toolNames = getToolNames(sarif);
   logger.debug(`Validating that each SARIF run has a unique category`);
   validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
@@ -133389,13 +133389,13 @@ async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processi
   const payload = buildPayload(
     await getCommitOid(checkoutPath),
     await getRef(),
-    processingResults.analysisKey,
+    postProcessingResults.analysisKey,
     getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     getWorkflowRunID(),
     getWorkflowRunAttempt(),
     checkoutURI,
-    processingResults.environment,
+    postProcessingResults.environment,
     toolNames,
     await determineBaseBranchHeadCommitOid()
   );

lib/upload-lib.js (generated, 26 changes)
@@ -84855,11 +84855,11 @@ __export(upload_lib_exports, {
   throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled,
   uploadFiles: () => uploadFiles,
   uploadPayload: () => uploadPayload,
-  uploadProcessedFiles: () => uploadProcessedFiles,
+  uploadPostProcessedFiles: () => uploadPostProcessedFiles,
   validateSarifFileSchema: () => validateSarifFileSchema,
   validateUniqueCategory: () => validateUniqueCategory,
   waitForProcessing: () => waitForProcessing,
-  writeProcessedFiles: () => writeProcessedFiles
+  writePostProcessedFiles: () => writePostProcessedFiles
 });
 module.exports = __toCommonJS(upload_lib_exports);
 var fs13 = __toESM(require("fs"));
@@ -92715,7 +92715,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
   const gitHubVersion = await getGitHubVersion();
   let sarif;
   category = analysis.fixCategory(logger, category);
@@ -92748,17 +92748,17 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
   );
   return { sarif, analysisKey, environment };
 }
-async function writeProcessedFiles(logger, pathInput, uploadTarget, processingResults) {
+async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
   const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
   if (outputPath !== void 0) {
     dumpSarifFile(
-      JSON.stringify(processingResults.sarif),
+      JSON.stringify(postProcessingResults.sarif),
       outputPath,
       logger,
       uploadTarget
     );
   } else {
-    logger.debug(`Not writing processed SARIF files.`);
+    logger.debug(`Not writing post-processed SARIF files.`);
   }
 }
 async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
@@ -92784,16 +92784,16 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
     category,
     uploadTarget
   );
-  return uploadProcessedFiles(
+  return uploadPostProcessedFiles(
     logger,
     checkoutPath,
     uploadTarget,
     processingResults
   );
 }
-async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processingResults) {
+async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
   logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  const sarif = processingResults.sarif;
+  const sarif = postProcessingResults.sarif;
   const toolNames = getToolNames(sarif);
   logger.debug(`Validating that each SARIF run has a unique category`);
   validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
@@ -92805,13 +92805,13 @@ async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processi
   const payload = buildPayload(
     await getCommitOid(checkoutPath),
     await getRef(),
-    processingResults.analysisKey,
+    postProcessingResults.analysisKey,
     getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     getWorkflowRunID(),
     getWorkflowRunAttempt(),
     checkoutURI,
-    processingResults.environment,
+    postProcessingResults.environment,
     toolNames,
     await determineBaseBranchHeadCommitOid()
   );
@@ -93019,11 +93019,11 @@ function filterAlertsByDiffRange(logger, sarif) {
   throwIfCombineSarifFilesDisabled,
   uploadFiles,
   uploadPayload,
-  uploadProcessedFiles,
+  uploadPostProcessedFiles,
   validateSarifFileSchema,
   validateUniqueCategory,
   waitForProcessing,
-  writeProcessedFiles
+  writePostProcessedFiles
 });
 /*! Bundled license information:

lib/upload-sarif-action.js (generated, 32 changes)
@@ -93371,7 +93371,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
   return payloadObj;
 }
 async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
   const gitHubVersion = await getGitHubVersion();
   let sarif;
   category = analysis.fixCategory(logger, category);
@@ -93404,22 +93404,22 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
   );
   return { sarif, analysisKey, environment };
 }
-async function writeProcessedFiles(logger, pathInput, uploadTarget, processingResults) {
+async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
   const outputPath = pathInput || process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
   if (outputPath !== void 0) {
     dumpSarifFile(
-      JSON.stringify(processingResults.sarif),
+      JSON.stringify(postProcessingResults.sarif),
       outputPath,
       logger,
       uploadTarget
     );
   } else {
-    logger.debug(`Not writing processed SARIF files.`);
+    logger.debug(`Not writing post-processed SARIF files.`);
   }
 }
-async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processingResults) {
+async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
   logger.startGroup(`Uploading ${uploadTarget.name} results`);
-  const sarif = processingResults.sarif;
+  const sarif = postProcessingResults.sarif;
   const toolNames = getToolNames(sarif);
   logger.debug(`Validating that each SARIF run has a unique category`);
   validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
@@ -93431,13 +93431,13 @@ async function uploadProcessedFiles(logger, checkoutPath, uploadTarget, processi
   const payload = buildPayload(
     await getCommitOid(checkoutPath),
     await getRef(),
-    processingResults.analysisKey,
+    postProcessingResults.analysisKey,
     getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     getWorkflowRunID(),
     getWorkflowRunAttempt(),
     checkoutURI,
-    processingResults.environment,
+    postProcessingResults.environment,
     toolNames,
     await determineBaseBranchHeadCommitOid()
   );
@@ -93631,7 +93631,7 @@ function filterAlertsByDiffRange(logger, sarif) {
 }
 
 // src/upload-sarif.ts
-async function processAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, processedOutputPath) {
+async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) {
   const sarifGroups = await getGroupedSarifFilePaths(
     logger,
     sarifPath
@@ -93641,7 +93641,7 @@ async function processAndUploadSarif(logger, features, uploadKind, checkoutPath,
     sarifGroups
   )) {
     const analysisConfig = getAnalysisConfig(analysisKind);
-    const processingResults = await postProcessSarifFiles(
+    const postProcessingResults = await postProcessSarifFiles(
       logger,
       features,
       checkoutPath,
@@ -93649,18 +93649,18 @@ async function processAndUploadSarif(logger, features, uploadKind, checkoutPath,
       category,
       analysisConfig
     );
-    await writeProcessedFiles(
+    await writePostProcessedFiles(
       logger,
-      processedOutputPath,
+      postProcessedOutputPath,
       analysisConfig,
-      processingResults
+      postProcessingResults
     );
     if (uploadKind === "always") {
-      uploadResults[analysisKind] = await uploadProcessedFiles(
+      uploadResults[analysisKind] = await uploadPostProcessedFiles(
         logger,
         checkoutPath,
         analysisConfig,
-        processingResults
+        postProcessingResults
      );
    }
  }
@@ -93714,7 +93714,7 @@ async function run() {
   const sarifPath = getRequiredInput("sarif_file");
   const checkoutPath = getRequiredInput("checkout_path");
   const category = getOptionalInput("category");
-  const uploadResults = await processAndUploadSarif(
+  const uploadResults = await postProcessAndUploadSarif(
     logger,
     features,
     "always",

@@ -36,7 +36,7 @@ steps:
     with:
       output: "${{ runner.temp }}/results"
       upload-database: false
-      processed-sarif-path: "${{ runner.temp }}/processed"
+      post-processed-sarif-path: "${{ runner.temp }}/post-processed"
   - name: Upload security SARIF
     if: contains(matrix.analysis-kinds, 'code-scanning')
     uses: actions/upload-artifact@v4
@@ -53,12 +53,12 @@ steps:
         quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
       path: "${{ runner.temp }}/results/javascript.quality.sarif"
       retention-days: 7
-  - name: Upload processed SARIF
+  - name: Upload post-processed SARIF
     uses: actions/upload-artifact@v4
     with:
       name: |
-        processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-      path: "${{ runner.temp }}/processed"
+        post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+      path: "${{ runner.temp }}/post-processed"
      retention-days: 7
      if-no-files-found: error
  - name: Check quality query does not appear in security SARIF

@@ -52,7 +52,7 @@ import {
 } from "./trap-caching";
 import * as uploadLib from "./upload-lib";
 import { UploadResult } from "./upload-lib";
-import { processAndUploadSarif } from "./upload-sarif";
+import { postProcessAndUploadSarif } from "./upload-sarif";
 import * as util from "./util";
 
 interface AnalysisStatusReport
@@ -352,14 +352,14 @@ async function run() {
   const category = actionsUtil.getOptionalInput("category");
 
   if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
-    uploadResults = await processAndUploadSarif(
+    uploadResults = await postProcessAndUploadSarif(
       logger,
       features,
       uploadKind,
       checkoutPath,
       outputDir,
       category,
-      actionsUtil.getOptionalInput("processed-sarif-path"),
+      actionsUtil.getOptionalInput("post-processed-sarif-path"),
     );
   } else if (uploadKind === "always") {
     uploadResults = {};

@@ -715,7 +715,7 @@ export async function postProcessSarifFiles(
   category: string | undefined,
   analysis: analyses.AnalysisConfig,
 ): Promise<PostProcessingResults> {
-  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
 
   const gitHubVersion = await getGitHubVersion();
 
@@ -760,18 +760,18 @@ export async function postProcessSarifFiles(
 }
 
 /**
- * Writes the processed SARIF file to disk, if needed based on `pathInput` or the `SARIF_DUMP_DIR`.
+ * Writes the post-processed SARIF file to disk, if needed based on `pathInput` or the `SARIF_DUMP_DIR`.
  *
  * @param logger The logger to use.
- * @param pathInput The input provided for `processed-sarif-path`.
+ * @param pathInput The input provided for `post-processed-sarif-path`.
  * @param uploadTarget The upload target.
  * @param processingResults The results of post-processing SARIF files.
  */
-export async function writeProcessedFiles(
+export async function writePostProcessedFiles(
   logger: Logger,
   pathInput: string | undefined,
   uploadTarget: analyses.AnalysisConfig,
-  processingResults: PostProcessingResults,
+  postProcessingResults: PostProcessingResults,
 ) {
   // If there's an explicit input, use that. Otherwise, use the value from the environment variable.
   const outputPath = pathInput || process.env[EnvVar.SARIF_DUMP_DIR];
@@ -779,13 +779,13 @@ export async function writeProcessedFiles(
   // If we have an output path, write the SARIF file to it.
   if (outputPath !== undefined) {
     dumpSarifFile(
-      JSON.stringify(processingResults.sarif),
+      JSON.stringify(postProcessingResults.sarif),
       outputPath,
       logger,
       uploadTarget,
     );
   } else {
-    logger.debug(`Not writing processed SARIF files.`);
+    logger.debug(`Not writing post-processed SARIF files.`);
   }
 }
 
@@ -836,7 +836,7 @@ async function uploadSpecifiedFiles(
     uploadTarget,
   );
 
-  return uploadProcessedFiles(
+  return uploadPostProcessedFiles(
     logger,
     checkoutPath,
     uploadTarget,
@@ -845,25 +845,24 @@ async function uploadSpecifiedFiles(
 }
 
 /**
- * Uploads the results of processing SARIF files to the specified upload target.
+ * Uploads the results of post-processing SARIF files to the specified upload target.
  *
 * @param logger The logger to use.
 * @param checkoutPath The path at which the repository was checked out.
 * @param uploadTarget The analysis configuration.
- * @param processingResults The results of post-processing SARIF files.
+ * @param postProcessingResults The results of post-processing SARIF files.
 *
- * @returns The results of uploading the `processingResults` to `uploadTarget`.
+ * @returns The results of uploading the `postProcessingResults` to `uploadTarget`.
 */
-export async function uploadProcessedFiles(
+export async function uploadPostProcessedFiles(
   logger: Logger,
   checkoutPath: string,
   uploadTarget: analyses.AnalysisConfig,
-  processingResults: PostProcessingResults,
+  postProcessingResults: PostProcessingResults,
 ): Promise<UploadResult> {
   logger.startGroup(`Uploading ${uploadTarget.name} results`);
 
-  const sarif = processingResults.sarif;
+  const sarif = postProcessingResults.sarif;
   const toolNames = util.getToolNames(sarif);
 
   logger.debug(`Validating that each SARIF run has a unique category`);
@@ -878,13 +877,13 @@ export async function uploadProcessedFiles(
   const payload = buildPayload(
     await gitUtils.getCommitOid(checkoutPath),
     await gitUtils.getRef(),
-    processingResults.analysisKey,
+    postProcessingResults.analysisKey,
     util.getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     actionsUtil.getWorkflowRunID(),
     actionsUtil.getWorkflowRunAttempt(),
     checkoutURI,
-    processingResults.environment,
+    postProcessingResults.environment,
     toolNames,
     await gitUtils.determineBaseBranchHeadCommitOid(),
   );

@@ -16,7 +16,7 @@ import {
   isThirdPartyAnalysis,
 } from "./status-report";
 import * as upload_lib from "./upload-lib";
-import { processAndUploadSarif } from "./upload-sarif";
+import { postProcessAndUploadSarif } from "./upload-sarif";
 import {
   ConfigurationError,
   checkActionVersion,
@@ -90,7 +90,7 @@ async function run() {
   const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
   const category = actionsUtil.getOptionalInput("category");
 
-  const uploadResults = await processAndUploadSarif(
+  const uploadResults = await postProcessAndUploadSarif(
     logger,
     features,
     "always",

@@ -9,7 +9,7 @@ import { getRunnerLogger } from "./logging";
 import { createFeatures, setupTests } from "./testing-utils";
 import { UploadResult } from "./upload-lib";
 import * as uploadLib from "./upload-lib";
-import { processAndUploadSarif } from "./upload-sarif";
+import { postProcessAndUploadSarif } from "./upload-sarif";
 import * as util from "./util";
 
 setupTests(test);
@@ -39,7 +39,7 @@ function mockPostProcessSarifFiles() {
   return postProcessSarifFiles;
 }
 
-const processAndUploadSarifMacro = test.macro({
+const postProcessAndUploadSarifMacro = test.macro({
   exec: async (
     t: ExecutionContext<unknown>,
     sarifFiles: string[],
@@ -54,14 +54,14 @@ const processAndUploadSarifMacro = test.macro({
     const toFullPath = (filename: string) => path.join(tempDir, filename);
 
     const postProcessSarifFiles = mockPostProcessSarifFiles();
-    const uploadProcessedFiles = sinon.stub(
+    const uploadPostProcessedFiles = sinon.stub(
       uploadLib,
-      "uploadProcessedFiles",
+      "uploadPostProcessedFiles",
     );
 
     for (const analysisKind of Object.values(AnalysisKind)) {
       const analysisConfig = getAnalysisConfig(analysisKind);
-      uploadProcessedFiles
+      uploadPostProcessedFiles
         .withArgs(logger, sinon.match.any, analysisConfig, sinon.match.any)
         .resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
     }
@@ -71,7 +71,7 @@ const processAndUploadSarifMacro = test.macro({
       fs.writeFileSync(sarifFile, "");
     }
 
-    const actual = await processAndUploadSarif(
+    const actual = await postProcessAndUploadSarif(
       logger,
       features,
       "always",
@@ -104,7 +104,7 @@ const processAndUploadSarifMacro = test.macro({
       t.is(actual[analysisKind], undefined);
       // Therefore, we also check that the mocked `uploadProcessedFiles` was not called for this analysis kind.
       t.assert(
-        !uploadProcessedFiles.calledWith(
+        !uploadPostProcessedFiles.calledWith(
          logger,
          sinon.match.any,
          getAnalysisConfig(analysisKind),
@@ -121,7 +121,7 @@ const processAndUploadSarifMacro = test.macro({
 
 test(
   "SARIF file",
-  processAndUploadSarifMacro,
+  postProcessAndUploadSarifMacro,
   ["test.sarif"],
   (tempDir) => path.join(tempDir, "test.sarif"),
   {
@@ -136,7 +136,7 @@ test(
 
 test(
   "JSON file",
-  processAndUploadSarifMacro,
+  postProcessAndUploadSarifMacro,
   ["test.json"],
   (tempDir) => path.join(tempDir, "test.json"),
   {
@@ -151,7 +151,7 @@ test(
 
 test(
   "Code Scanning files",
-  processAndUploadSarifMacro,
+  postProcessAndUploadSarifMacro,
   ["test.json", "test.sarif"],
   undefined,
   {
@@ -167,7 +167,7 @@ test(
 
 test(
   "Code Quality file",
-  processAndUploadSarifMacro,
+  postProcessAndUploadSarifMacro,
   ["test.quality.sarif"],
   (tempDir) => path.join(tempDir, "test.quality.sarif"),
   {
@@ -182,7 +182,7 @@ test(
 
 test(
   "Mixed files",
-  processAndUploadSarifMacro,
+  postProcessAndUploadSarifMacro,
   ["test.sarif", "test.quality.sarif"],
   undefined,
   {
@@ -203,7 +203,7 @@ test(
   },
 );
 
-test("processAndUploadSarif doesn't upload if upload is disabled", async (t) => {
+test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => {
   await util.withTmpDir(async (tempDir) => {
     const logger = getRunnerLogger(true);
     const features = createFeatures([]);
@@ -211,12 +211,15 @@ test("processAndUploadSarif doesn't upload if upload is disabled", async (t) =>
     const toFullPath = (filename: string) => path.join(tempDir, filename);
 
     const postProcessSarifFiles = mockPostProcessSarifFiles();
-    const uploadProcessedFiles = sinon.stub(uploadLib, "uploadProcessedFiles");
+    const uploadPostProcessedFiles = sinon.stub(
+      uploadLib,
+      "uploadPostProcessedFiles",
+    );
 
     fs.writeFileSync(toFullPath("test.sarif"), "");
     fs.writeFileSync(toFullPath("test.quality.sarif"), "");
 
-    const actual = await processAndUploadSarif(
+    const actual = await postProcessAndUploadSarif(
       logger,
       features,
       "never",
@@ -226,11 +229,11 @@ test("processAndUploadSarif doesn't upload if upload is disabled", async (t) =>
 
     t.truthy(actual);
     t.assert(postProcessSarifFiles.calledTwice);
-    t.assert(uploadProcessedFiles.notCalled);
+    t.assert(uploadPostProcessedFiles.notCalled);
   });
 });
 
-test("processAndUploadSarif writes processed SARIF files if output directory is provided", async (t) => {
+test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => {
   await util.withTmpDir(async (tempDir) => {
     const logger = getRunnerLogger(true);
     const features = createFeatures([]);
@@ -242,22 +245,22 @@ test("processAndUploadSarif writes processed SARIF files if output directory is
     fs.writeFileSync(toFullPath("test.sarif"), "");
     fs.writeFileSync(toFullPath("test.quality.sarif"), "");
 
-    const processedOutPath = path.join(tempDir, "processed");
-    const actual = await processAndUploadSarif(
+    const postProcessedOutPath = path.join(tempDir, "post-processed");
+    const actual = await postProcessAndUploadSarif(
       logger,
      features,
      "never",
      "",
      tempDir,
      "",
-      processedOutPath,
+      postProcessedOutPath,
    );
 
    t.truthy(actual);
    t.assert(postProcessSarifFiles.calledTwice);
-    t.assert(fs.existsSync(path.join(processedOutPath, "upload.sarif")));
+    t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif")));
    t.assert(
-      fs.existsSync(path.join(processedOutPath, "upload.quality.sarif")),
+      fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")),
    );
  });
});

@@ -11,7 +11,7 @@ export type UploadSarifResults = Partial<
 >;
 
 /**
- * Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
+ * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
  *
  * @param logger The logger to use.
 * @param features Information about enabled features.
@@ -19,18 +19,18 @@ export type UploadSarifResults = Partial<
 * @param checkoutPath The path where the repository was checked out at.
 * @param sarifPath The path to the file or directory to upload.
 * @param category The analysis category.
- * @param processedOutputPath The path to a directory to which the post-processed SARIF files should be written to.
+ * @param postProcessedOutputPath The path to a directory to which the post-processed SARIF files should be written to.
 *
 * @returns A partial mapping from analysis kinds to the upload results.
 */
-export async function processAndUploadSarif(
+export async function postProcessAndUploadSarif(
   logger: Logger,
   features: FeatureEnablement,
   uploadKind: UploadKind,
   checkoutPath: string,
   sarifPath: string,
   category?: string,
-  processedOutputPath?: string,
+  postProcessedOutputPath?: string,
 ): Promise<UploadSarifResults> {
   const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
     logger,
@@ -42,7 +42,7 @@ export async function processAndUploadSarif(
     sarifGroups,
   )) {
     const analysisConfig = analyses.getAnalysisConfig(analysisKind);
-    const processingResults = await upload_lib.postProcessSarifFiles(
+    const postProcessingResults = await upload_lib.postProcessSarifFiles(
       logger,
       features,
       checkoutPath,
@@ -51,22 +51,22 @@ export async function processAndUploadSarif(
       analysisConfig,
     );
 
-    // Write the processed SARIF files to disk. This will only write them if needed based on user inputs
+    // Write the post-processed SARIF files to disk. This will only write them if needed based on user inputs
     // or environment variables.
-    await upload_lib.writeProcessedFiles(
+    await upload_lib.writePostProcessedFiles(
       logger,
-      processedOutputPath,
+      postProcessedOutputPath,
       analysisConfig,
-      processingResults,
+      postProcessingResults,
     );
 
-    // Only perform the actual upload of the processed files, if `uploadKind` is `always`.
+    // Only perform the actual upload of the post-processed files if `uploadKind` is `always`.
     if (uploadKind === "always") {
-      uploadResults[analysisKind] = await upload_lib.uploadProcessedFiles(
+      uploadResults[analysisKind] = await upload_lib.uploadPostProcessedFiles(
        logger,
        checkoutPath,
        analysisConfig,
-        processingResults,
+        postProcessingResults,
      );
    }
  }
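
Note: a small usage sketch of the renamed entry point, in TypeScript. It assumes the AnalysisKind enum and the UploadResult.sarifID field behave as in the rest of the code base; the import paths and the wrapper function are illustrative, not part of this change. The return value is a partial mapping, so analysis kinds with no SARIF files in the input directory are simply absent:

    // Hypothetical caller: post-process and upload, then report what was uploaded.
    import { AnalysisKind } from "./analyses";
    import { FeatureEnablement } from "./feature-flags";
    import { Logger } from "./logging";
    import { postProcessAndUploadSarif } from "./upload-sarif";

    export async function uploadAndReport(
      logger: Logger,
      features: FeatureEnablement,
      checkoutPath: string,
      sarifDir: string,
    ) {
      const results = await postProcessAndUploadSarif(
        logger,
        features,
        "always",
        checkoutPath,
        sarifDir,
        undefined, // no explicit category
        undefined, // post-processed SARIF is only written if the dump dir env var is set
      );
      // Partial<Record<AnalysisKind, UploadResult>>: kinds without SARIF files are missing.
      for (const kind of Object.values(AnalysisKind)) {
        const result = results[kind];
        if (result !== undefined) {
          logger.info(`Uploaded ${kind} SARIF (id: ${result.sarifID}).`);
        }
      }
    }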