Send new per-query alert count event reports for QA telemetry (#1741)

Angela P Wen
2023-06-30 07:53:13 -07:00
committed by GitHub
parent cff3d9e3c9
commit 46a6823b81
19 changed files with 213 additions and 51 deletions

View File

@@ -315,6 +315,25 @@ export type ActionStatus =
| "failure"
| "user-error";
// Any status report may include an array of EventReports associated with it.
export interface EventReport {
/** An enumerable description of the event. */
event: string;
/** Time this event started. */
started_at: string;
/** Time this event ended. */
completed_at: string;
/** e.g. `success`, `failure`, `timeout`, etc. */
exit_status?: string;
/** The language this event relates to, if the event is language-specific. */
language?: string;
/**
* A generic JSON blob of data related to this event.
* Use Object.assign() to append additional fields to the object.
*/
properties?: object;
}
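// Hypothetical illustration (not part of this commit): an EventReport of the kind
// the analyze step pushes for per-query alert counts. All field values here are
// made up; the timestamps follow the epoch-millisecond strings produced by
// `new Date().getTime().toString()` at the call site in analyze.ts.
const exampleEventReport: EventReport = {
  event: "codeql database interpret-results",
  started_at: "1688140393000",
  completed_at: "1688140405000",
  exit_status: "success",
  language: "javascript",
  properties: { "js/xss": 2, "js/sql-injection": 1 },
};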
export interface StatusReportBase {
/**
* UUID representing the job run that this status report belongs to. We

View File

@@ -28,13 +28,13 @@ import {
CODEQL_ACTION_DID_AUTOBUILD_GOLANG,
} from "./shared-environment";
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
import * as upload_lib from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import * as util from "./util";
import { checkForTimeout, wrapError } from "./util";
interface AnalysisStatusReport
extends upload_lib.UploadStatusReport,
extends uploadLib.UploadStatusReport,
QueriesStatusReport {}
interface FinishStatusReport
@@ -269,7 +269,7 @@ async function run() {
core.setOutput("db-locations", dbLocations);
const uploadInput = actionsUtil.getOptionalInput("upload");
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
uploadResult = await upload_lib.uploadFromActions(
uploadResult = await uploadLib.uploadFromActions(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
@@ -296,7 +296,7 @@ async function run() {
uploadResult !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await upload_lib.waitForProcessing(
await uploadLib.waitForProcessing(
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
uploadResult.sarifID,
getActionsLogger()

View File

@@ -18,13 +18,18 @@ import { Feature } from "./feature-flags";
import { Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { setupTests, setupActionsVars, createFeatures } from "./testing-utils";
import * as uploadLib from "./upload-lib";
import * as util from "./util";
setupTests(test);
// Checks that the duration fields are populated for the correct language
// and correct case of builtin or custom. Also checks the correct search
// paths are set in the database analyze invocation.
/** Checks that the duration fields are populated for the correct language
* and correct case of builtin or custom. Also checks the correct search
* paths are set in the database analyze invocation.
*
* Mocks the QA telemetry feature flag and checks the appropriate status report
* fields.
*/
test("status report fields and search path setting", async (t) => {
let searchPathsUsed: Array<string | undefined> = [];
return await util.withTmpDir(async (tmpDir) => {
@@ -38,6 +43,8 @@ test("status report fields and search path setting", async (t) => {
[Language.java]: ["c/d@2.0.0"],
};
sinon.stub(uploadLib, "validateSarifFileSchema");
for (const language of Object.values(Language)) {
setCodeQL({
packDownload: async () => ({ packs: [] }),
@@ -135,12 +142,12 @@ test("status report fields and search path setting", async (t) => {
undefined,
config,
getRunnerLogger(true),
createFeatures([])
createFeatures([Feature.QaTelemetryEnabled])
);
const hasPacks = language in packs;
const statusReportKeys = Object.keys(builtinStatusReport).sort();
if (hasPacks) {
t.deepEqual(statusReportKeys.length, 3, statusReportKeys.toString());
t.deepEqual(statusReportKeys.length, 4, statusReportKeys.toString());
t.deepEqual(
statusReportKeys[0],
`analyze_builtin_queries_${language}_duration_ms`
@@ -149,8 +156,9 @@ test("status report fields and search path setting", async (t) => {
statusReportKeys[1],
`analyze_custom_queries_${language}_duration_ms`
);
t.deepEqual(statusReportKeys[2], "event_reports");
t.deepEqual(
statusReportKeys[2],
statusReportKeys[3],
`interpret_results_${language}_duration_ms`
);
} else {
@@ -158,11 +166,17 @@ test("status report fields and search path setting", async (t) => {
statusReportKeys[0],
`analyze_builtin_queries_${language}_duration_ms`
);
t.deepEqual(statusReportKeys[1], "event_reports");
t.deepEqual(
statusReportKeys[1],
statusReportKeys[2],
`interpret_results_${language}_duration_ms`
);
}
if (builtinStatusReport.event_reports) {
for (const eventReport of builtinStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
config.queries[language] = {
builtin: [],
@@ -185,9 +199,9 @@ test("status report fields and search path setting", async (t) => {
undefined,
config,
getRunnerLogger(true),
createFeatures([])
createFeatures([Feature.QaTelemetryEnabled])
);
t.deepEqual(Object.keys(customStatusReport).length, 2);
t.deepEqual(Object.keys(customStatusReport).length, 3);
t.true(
`analyze_custom_queries_${language}_duration_ms` in customStatusReport
);
@@ -196,6 +210,12 @@ test("status report fields and search path setting", async (t) => {
: [undefined, "/1", "/2"];
t.deepEqual(searchPathsUsed, expectedSearchPathsUsed);
t.true(`interpret_results_${language}_duration_ms` in customStatusReport);
t.true("event_reports" in customStatusReport);
if (customStatusReport.event_reports) {
for (const eventReport of customStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
}
}
}
verifyQuerySuites(tmpDir);

View File

@@ -6,7 +6,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import del from "del";
import * as yaml from "js-yaml";
import { DatabaseCreationTimings } from "./actions-util";
import { DatabaseCreationTimings, EventReport } from "./actions-util";
import * as analysisPaths from "./analysis-paths";
import { CodeQL, getCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
@@ -14,6 +14,7 @@ import { FeatureEnablement, Feature } from "./feature-flags";
import { isScannedLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { endTracingForCluster } from "./tracer-config";
import { validateSarifFileSchema } from "./upload-lib";
import * as util from "./util";
export class CodeQLAnalysisError extends Error {
@@ -78,6 +79,8 @@ export interface QueriesStatusReport {
interpret_results_swift_duration_ms?: number;
/** Name of language that errored during analysis (or undefined if no language failed). */
analyze_failure_language?: string;
/** Reports on discrete events associated with this status report. */
event_reports?: EventReport[];
}
async function setupPythonExtractor(
@@ -242,6 +245,9 @@ export async function runQueries(
const packsWithVersion = config.packs[language] || [];
try {
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
let startTimeInterpretResults: number;
let endTimeInterpretResults: number;
if (await util.useCodeScanningConfigInCli(codeql, features)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
@@ -257,16 +263,16 @@ export async function runQueries(
new Date().getTime() - startTimeBuiltIn;
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(
language,
undefined,
sarifFile,
config.debugMode
);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
} else {
@@ -342,19 +348,37 @@ export async function runQueries(
}
logger.endGroup();
logger.startGroup(`Interpreting results for ${language}`);
const startTimeInterpretResults = new Date().getTime();
const sarifFile = path.join(sarifFolder, `${language}.sarif`);
startTimeInterpretResults = new Date().getTime();
const analysisSummary = await runInterpretResults(
language,
querySuitePaths,
sarifFile,
config.debugMode
);
endTimeInterpretResults = new Date().getTime();
statusReport[`interpret_results_${language}_duration_ms`] =
new Date().getTime() - startTimeInterpretResults;
endTimeInterpretResults - startTimeInterpretResults;
logger.endGroup();
logger.info(analysisSummary);
}
if (await features.getValue(Feature.QaTelemetryEnabled)) {
const perQueryAlertCounts = getPerQueryAlertCounts(sarifFile, logger);
const perQueryAlertCountEventReport: EventReport = {
event: "codeql database interpret-results",
started_at: startTimeInterpretResults.toString(),
completed_at: endTimeInterpretResults.toString(),
exit_status: "success",
language,
properties: perQueryAlertCounts,
};
if (statusReport["event_reports"] === undefined) {
statusReport["event_reports"] = [];
}
statusReport["event_reports"].push(perQueryAlertCountEventReport);
}
await runPrintLinesOfCode(language);
} catch (e) {
logger.info(String(e));
@@ -392,6 +416,34 @@ export async function runQueries(
);
}
/** Parse a SARIF file and return a map from each query ID to its alert count. */
function getPerQueryAlertCounts(
sarifPath: string,
log: Logger
): Record<string, number> {
validateSarifFileSchema(sarifPath, log);
const sarifObject = JSON.parse(
fs.readFileSync(sarifPath, "utf8")
) as util.SarifFile;
// We do not need to compute fingerprints because we are not sending data based on locations.
// Build the map from query ID to alert count.
const perQueryAlertCounts: Record<string, number> = {};
// All rules (queries), from all results, from all runs
for (const sarifRun of sarifObject.runs) {
if (sarifRun.results) {
for (const result of sarifRun.results) {
const query = result.rule?.id || result.ruleId;
if (query) {
perQueryAlertCounts[query] = (perQueryAlertCounts[query] || 0) + 1;
}
}
}
}
return perQueryAlertCounts;
}
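// Hypothetical usage sketch (not part of this commit): for a SARIF file whose single
// run contains two "js/xss" results and one "js/sql-injection" result (rule IDs are
// illustrative), the function returns { "js/xss": 2, "js/sql-injection": 1 }.
// Each result is attributed to result.rule?.id when present, falling back to
// result.ruleId; results with neither identifier are not counted.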
async function runPrintLinesOfCode(language: Language): Promise<string> {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databasePrintBaseline(databasePath);

View File

@@ -43,6 +43,7 @@ export enum Feature {
ExportCodeScanningConfigEnabled = "export_code_scanning_config_enabled",
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
QaTelemetryEnabled = "qa_telemetry_enabled",
UploadFailedSarifEnabled = "upload_failed_sarif_enabled",
}
@@ -76,6 +77,11 @@ export const featureConfig: Record<
minimumVersion: "2.7.5",
defaultValue: false,
},
[Feature.QaTelemetryEnabled]: {
envVar: "CODEQL_ACTION_QA_TELEMETRY",
minimumVersion: undefined,
defaultValue: false,
},
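// Notes on this entry (not part of this commit): minimumVersion is undefined, so the
// gate does not depend on the CodeQL CLI version, and defaultValue: false keeps the
// telemetry off unless the feature is enabled. The call site added in analyze.ts asks
//   await features.getValue(Feature.QaTelemetryEnabled)
// and, by analogy with the other featureConfig entries, setting
// CODEQL_ACTION_QA_TELEMETRY in the workflow environment should override the flag.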
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",

View File

@@ -20,7 +20,7 @@ import * as workflow from "./workflow";
// Takes a list of paths to SARIF files and combines them,
// returning the contents of the combined SARIF file.
export function combineSarifFiles(sarifFiles: string[]): SarifFile {
function combineSarifFiles(sarifFiles: string[]): SarifFile {
const combinedSarif: SarifFile = {
version: null,
runs: [],
@@ -198,7 +198,7 @@ function getSarifFilePaths(sarifPath: string) {
}
// Counts the number of results in the given SARIF file
export function countResultsInSarif(sarif: string): number {
function countResultsInSarif(sarif: string): number {
let numResults = 0;
let parsedSarif;
try {
@@ -224,7 +224,7 @@ export function countResultsInSarif(sarif: string): number {
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8"));
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;
const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema;
const result = new jsonschema.Validator().validate(sarif, schema);

View File

@@ -72,6 +72,9 @@ export interface SarifInvocation {
export interface SarifResult {
ruleId?: string;
rule?: {
id?: string;
};
message?: {
text?: string;
};
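// Hypothetical illustration (not part of this commit) of why the optional nested
// `rule` field was added: getPerQueryAlertCounts above prefers the nested id and
// falls back to the top-level ruleId, so both of these illustrative results are
// counted under a query ID:
//   { rule: { id: "js/xss" } }          -> counted as "js/xss"
//   { ruleId: "js/sql-injection" }      -> counted as "js/sql-injection"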