Merge pull request #3136 from github/mbg/dep-caching/telemetry

Add telemetry for dependency caching
Author: Michael B. Gale
Date: 2025-09-29 16:01:30 +01:00
Committed by: GitHub
11 changed files with 253 additions and 28 deletions
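At a glance, the pull request threads three new telemetry payloads through the existing status reports: per-language restore results from the init Action, per-language upload results from the analyze ("finish") Action, and an overall cache-usage summary from the init-post step. A minimal sketch of the resulting report fragments, using the field and enum names added in this diff with purely illustrative values:

// Illustrative values only; the field names are those introduced below.
dependency_caching_restore_results: [
  { language: "java", hit_kind: "exact", download_duration_ms: 3100 },
],                                                              // init status report
dependency_caching_upload_results: [
  { language: "java", result: "stored", upload_size_bytes: 52428800, upload_duration_ms: 8700 },
],                                                              // analyze ("finish") status report
dependency_caching_usage: { count: 3, size_bytes: 734003200 },  // init-post status report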

lib/analyze-action.js generated (29 changed lines)

@@ -93338,6 +93338,7 @@ async function makeGlobber(patterns) {
return glob.create(patterns.join("\n"));
}
async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
const status = [];
for (const language of config.languages) {
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === void 0) {
@@ -93348,6 +93349,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
}
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, result: "no-hash" /* NoHash */ });
logger.info(
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
);
@@ -93355,6 +93357,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
}
const size = await getTotalCacheSize(cacheConfig.paths, logger, true);
if (size === 0) {
status.push({ language, result: "empty" /* Empty */ });
logger.info(
`Skipping upload of dependency cache for ${language} since it is empty.`
);
@@ -93365,18 +93368,28 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
`Uploading cache of size ${size} for ${language} with key ${key}...`
);
try {
const start = performance.now();
await actionsCache3.saveCache(cacheConfig.paths, key);
const upload_duration_ms = Math.round(performance.now() - start);
status.push({
language,
result: "stored" /* Stored */,
upload_size_bytes: Math.round(size),
upload_duration_ms
});
} catch (error2) {
if (error2 instanceof actionsCache3.ReserveCacheError) {
logger.info(
`Not uploading cache for ${language}, because ${key} is already in use.`
);
logger.debug(error2.message);
status.push({ language, result: "duplicate" /* Duplicate */ });
} else {
throw error2;
}
}
}
return status;
}
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
const hash2 = await glob.hashFiles(cacheConfig.hash.join("\n"));
@@ -95941,7 +95954,7 @@ function filterAlertsByDiffRange(logger, sarif) {
}
// src/analyze-action.ts
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, logger) {
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, dependencyCacheResults, logger) {
const status = getActionsStatus(error2, stats?.analyze_failure_language);
const statusReportBase = await createStatusReportBase(
"finish" /* Analyze */,
@@ -95958,7 +95971,8 @@ async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUplo
...statusReportBase,
...stats || {},
...dbCreationTimings || {},
...trapCacheCleanup || {}
...trapCacheCleanup || {},
dependency_caching_upload_results: dependencyCacheResults
};
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport = {
@@ -96039,6 +96053,7 @@ async function run() {
let trapCacheUploadTime = void 0;
let dbCreationTimings = void 0;
let didUploadTrapCaches = false;
let dependencyCacheResults;
initializeEnvironment(getActionVersion());
persistInputs();
const logger = getActionsLogger();
@@ -96177,7 +96192,11 @@ async function run() {
"java_minimize_dependency_jars" /* JavaMinimizeDependencyJars */,
codeql
);
await uploadDependencyCaches(config, logger, minimizeJavaJars);
dependencyCacheResults = await uploadDependencyCaches(
config,
logger,
minimizeJavaJars
);
}
if (isInTestMode()) {
logger.debug("In test mode. Waiting for processing is disabled.");
@@ -96208,6 +96227,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger
);
return;
@@ -96225,6 +96245,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger
);
} else if (runStats) {
@@ -96237,6 +96258,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger
);
} else {
@@ -96249,6 +96271,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger
);
}

lib/init-action-post.js generated

@@ -128534,6 +128534,18 @@ function computeAutomationID(analysis_key, environment) {
}
return automationID;
}
async function listActionsCaches(key, ref) {
const repositoryNwo = getRepositoryNwo();
return await getApiClient().paginate(
"GET /repos/{owner}/{repo}/actions/caches",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
key,
ref
}
);
}
function wrapApiConfigurationError(e) {
if (isHTTPError(e)) {
if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
@@ -128547,6 +128559,9 @@ function wrapApiConfigurationError(e) {
return e;
}
// src/caching-utils.ts
var core6 = __toESM(require_core());
// src/codeql.ts
var fs13 = __toESM(require("fs"));
var path13 = __toESM(require("path"));
@@ -128818,9 +128833,6 @@ var CodeQuality = {
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};
// src/caching-utils.ts
var core6 = __toESM(require_core());
// src/config/db-config.ts
var semver2 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
@@ -131169,6 +131181,22 @@ var core11 = __toESM(require_core());
// src/dependency-caching.ts
var actionsCache3 = __toESM(require_cache3());
var glob = __toESM(require_glob3());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
async function getDependencyCacheUsage(logger) {
try {
const caches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);
const totalSize = caches.reduce(
(acc, cache) => acc + (cache.size_in_bytes ?? 0),
0
);
return { count: caches.length, size_bytes: totalSize };
} catch (err) {
logger.warning(
`Unable to retrieve information about dependency cache usage: ${getErrorMessage(err)}`
);
}
return void 0;
}
// src/analyze.ts
function dbIsFinalized(config, language, logger) {
@@ -133718,6 +133746,7 @@ async function runWrapper() {
const startedAt = /* @__PURE__ */ new Date();
let config;
let uploadFailedSarifResult;
let dependencyCachingUsage;
try {
restoreInputs();
const gitHubVersion = await getGitHubVersion();
@@ -133745,6 +133774,9 @@ async function runWrapper() {
features,
logger
);
if (await isAnalyzingDefaultBranch() && config.dependencyCachingEnabled !== "none" /* None */) {
dependencyCachingUsage = await getDependencyCacheUsage(logger);
}
}
} catch (unwrappedError) {
const error2 = wrapError(unwrappedError);
@@ -133778,7 +133810,8 @@ async function runWrapper() {
const statusReport = {
...statusReportBase,
...uploadFailedSarifResult,
job_status: getFinalJobStatus()
job_status: getFinalJobStatus(),
dependency_caching_usage: dependencyCachingUsage
};
logger.info("Sending status report for init-post step.");
await sendStatusReport(statusReport);

lib/init-action.js generated (24 changed lines)

@@ -88184,7 +88184,7 @@ async function makeGlobber(patterns) {
return glob.create(patterns.join("\n"));
}
async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
const restoredCaches = [];
const status = [];
for (const language of languages) {
const cacheConfig = getDefaultCacheConfig()[language];
if (cacheConfig === void 0) {
@@ -88195,6 +88195,7 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
}
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, hit_kind: "no-hash" /* NoHash */ });
logger.info(
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
);
@@ -88209,19 +88210,23 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
", "
)}`
);
const start = performance.now();
const hitKey = await actionsCache3.restoreCache(
cacheConfig.paths,
primaryKey,
restoreKeys
);
const download_duration_ms = Math.round(performance.now() - start);
if (hitKey !== void 0) {
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
restoredCaches.push(language);
const hit_kind = hitKey === primaryKey ? "exact" /* Exact */ : "partial" /* Partial */;
status.push({ language, hit_kind, download_duration_ms });
} else {
status.push({ language, hit_kind: "miss" /* Miss */ });
logger.info(`No suitable cache found for ${language}.`);
}
}
return restoredCaches;
return status;
}
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
const hash = await glob.hashFiles(cacheConfig.hash.join("\n"));
@@ -90324,7 +90329,7 @@ async function sendStatusReport(statusReport) {
);
}
}
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats) {
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats, dependencyCachingResults) {
const languages = config.languages.join(",");
const paths = (config.originalUserInput.paths || []).join(",");
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(
@@ -90361,6 +90366,7 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config
trap_cache_download_duration_ms: Math.round(config.trapCacheDownloadTime),
overlay_base_database_download_size_bytes: overlayBaseDatabaseStats?.databaseSizeBytes,
overlay_base_database_download_duration_ms: overlayBaseDatabaseStats?.databaseDownloadDurationMs,
dependency_caching_restore_results: dependencyCachingResults,
query_filters: JSON.stringify(
config.originalUserInput["query-filters"] ?? []
),
@@ -90543,7 +90549,7 @@ async function getWorkflowAbsolutePath(logger) {
}
// src/init-action.ts
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, logger, error2) {
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, dependencyCachingResults, logger, error2) {
const statusReportBase = await createStatusReportBase(
"init" /* Init */,
getActionsStatus(error2),
@@ -90580,7 +90586,8 @@ async function sendCompletedStatusReport(startedAt, config, configFile, toolsDow
Math.round(
await getTotalCacheSize(Object.values(config.trapCaches), logger)
),
overlayBaseDatabaseStats
overlayBaseDatabaseStats,
dependencyCachingResults
);
await sendStatusReport({
...initWithConfigStatusReport,
@@ -90744,6 +90751,7 @@ async function run() {
return;
}
let overlayBaseDatabaseStats;
let dependencyCachingResults;
try {
if (config.overlayDatabaseMode === "overlay" /* Overlay */ && config.useOverlayDatabaseCaching) {
overlayBaseDatabaseStats = await downloadOverlayBaseDatabaseFromCache(
@@ -90888,7 +90896,7 @@ exec ${goBinaryPath} "$@"`
codeql
);
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
await downloadDependencyCaches(
dependencyCachingResults = await downloadDependencyCaches(
config.languages,
logger,
minimizeJavaJars
@@ -90993,6 +91001,7 @@ exec ${goBinaryPath} "$@"`
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger,
error2
);
@@ -91010,6 +91019,7 @@ exec ${goBinaryPath} "$@"`
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger
);
}

src/analyze-action.ts

@@ -26,7 +26,10 @@ import {
isCodeScanningEnabled,
} from "./config-utils";
import { uploadDatabases } from "./database-upload";
import { uploadDependencyCaches } from "./dependency-caching";
import {
DependencyCacheUploadStatusReport,
uploadDependencyCaches,
} from "./dependency-caching";
import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
import { EnvVar } from "./environment";
import { Feature, Features } from "./feature-flags";
@@ -55,10 +58,15 @@ interface AnalysisStatusReport
extends uploadLib.UploadStatusReport,
QueriesStatusReport {}
interface DependencyCachingUploadStatusReport {
dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
}
interface FinishStatusReport
extends StatusReportBase,
DatabaseCreationTimings,
AnalysisStatusReport {}
AnalysisStatusReport,
DependencyCachingUploadStatusReport {}
interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
/** Size of TRAP caches that we uploaded, in bytes. */
@@ -76,6 +84,7 @@ async function sendStatusReport(
dbCreationTimings: DatabaseCreationTimings | undefined,
didUploadTrapCaches: boolean,
trapCacheCleanup: TrapCacheCleanupStatusReport | undefined,
dependencyCacheResults: DependencyCacheUploadStatusReport | undefined,
logger: Logger,
) {
const status = getActionsStatus(error, stats?.analyze_failure_language);
@@ -95,6 +104,7 @@ async function sendStatusReport(
...(stats || {}),
...(dbCreationTimings || {}),
...(trapCacheCleanup || {}),
dependency_caching_upload_results: dependencyCacheResults,
};
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
@@ -209,6 +219,7 @@ async function run() {
let trapCacheUploadTime: number | undefined = undefined;
let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
let didUploadTrapCaches = false;
let dependencyCacheResults: DependencyCacheUploadStatusReport | undefined;
util.initializeEnvironment(actionsUtil.getActionVersion());
// Make inputs accessible in the `post` step, details at
@@ -388,7 +399,11 @@ async function run() {
Feature.JavaMinimizeDependencyJars,
codeql,
);
await uploadDependencyCaches(config, logger, minimizeJavaJars);
dependencyCacheResults = await uploadDependencyCaches(
config,
logger,
minimizeJavaJars,
);
}
// We don't upload results in test mode, so don't wait for processing
@@ -431,6 +446,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger,
);
return;
@@ -449,6 +465,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger,
);
} else if (runStats) {
@@ -461,6 +478,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger,
);
} else {
@@ -473,6 +491,7 @@ async function run() {
dbCreationTimings,
didUploadTrapCaches,
trapCacheCleanupTelemetry,
dependencyCacheResults,
logger,
);
}

src/api-client.ts

@@ -245,7 +245,7 @@ export interface ActionsCacheItem {
/** List all Actions cache entries matching the provided key and ref. */
export async function listActionsCaches(
key: string,
ref: string,
ref?: string,
): Promise<ActionsCacheItem[]> {
const repositoryNwo = getRepositoryNwo();
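Making `ref` optional here is what lets the new `getDependencyCacheUsage` helper list dependency caches across all refs by key prefix alone. A usage sketch (the explicit ref value below is illustrative):

// CODEQL_DEPENDENCY_CACHE_PREFIX is defined in src/dependency-caching.ts.
// List every Actions cache whose key starts with the CodeQL dependency-cache prefix:
const allDependencyCaches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);

// Or, as before, scope the listing to a specific ref (illustrative value):
const branchCaches = await listActionsCaches(
  CODEQL_DEPENDENCY_CACHE_PREFIX,
  "refs/heads/main",
);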

src/dependency-caching.ts

@@ -5,12 +5,13 @@ import * as actionsCache from "@actions/cache";
import * as glob from "@actions/glob";
import { getTemporaryDirectory } from "./actions-util";
import { listActionsCaches } from "./api-client";
import { getTotalCacheSize } from "./caching-utils";
import { Config } from "./config-utils";
import { EnvVar } from "./environment";
import { KnownLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { getRequiredEnvParam } from "./util";
import { getErrorMessage, getRequiredEnvParam } from "./util";
/**
* Caching configuration for a particular language.
@@ -84,20 +85,42 @@ async function makeGlobber(patterns: string[]): Promise<glob.Globber> {
return glob.create(patterns.join("\n"));
}
/** Enumerates possible outcomes for cache hits. */
export enum CacheHitKind {
/** We were unable to calculate a hash for the key. */
NoHash = "no-hash",
/** No cache was found. */
Miss = "miss",
/** The primary cache key matched. */
Exact = "exact",
/** A restore key matched. */
Partial = "partial",
}
/** Represents results of trying to restore a dependency cache for a language. */
export interface DependencyCacheRestoreStatus {
language: Language;
hit_kind: CacheHitKind;
download_duration_ms?: number;
}
/** An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheRestoreStatusReport = DependencyCacheRestoreStatus[];
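For illustration, a restore report for a hypothetical two-language analysis might look like this (values made up; `KnownLanguage` members as imported from ./languages above):

const exampleRestoreReport: DependencyCacheRestoreStatusReport = [
  // The Java cache was restored via a restore key rather than the primary key.
  { language: KnownLanguage.java, hit_kind: CacheHitKind.Partial, download_duration_ms: 4200 },
  // No usable cache was found for C#.
  { language: KnownLanguage.csharp, hit_kind: CacheHitKind.Miss },
];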
/**
* Attempts to restore dependency caches for the languages being analyzed.
*
* @param languages The languages being analyzed.
* @param logger A logger to record some informational messages to.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
* @returns A list of languages for which dependency caches were restored.
* @returns An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration.
*/
export async function downloadDependencyCaches(
languages: Language[],
logger: Logger,
minimizeJavaJars: boolean,
): Promise<Language[]> {
const restoredCaches: Language[] = [];
): Promise<DependencyCacheRestoreStatusReport> {
const status: DependencyCacheRestoreStatusReport = [];
for (const language of languages) {
const cacheConfig = getDefaultCacheConfig()[language];
@@ -114,6 +137,7 @@ export async function downloadDependencyCaches(
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, hit_kind: CacheHitKind.NoHash });
logger.info(
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
@@ -131,35 +155,66 @@ export async function downloadDependencyCaches(
)}`,
);
const start = performance.now();
const hitKey = await actionsCache.restoreCache(
cacheConfig.paths,
primaryKey,
restoreKeys,
);
const download_duration_ms = Math.round(performance.now() - start);
if (hitKey !== undefined) {
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
restoredCaches.push(language);
const hit_kind =
hitKey === primaryKey ? CacheHitKind.Exact : CacheHitKind.Partial;
status.push({ language, hit_kind, download_duration_ms });
} else {
status.push({ language, hit_kind: CacheHitKind.Miss });
logger.info(`No suitable cache found for ${language}.`);
}
}
return restoredCaches;
return status;
}
/** Enumerates possible outcomes for storing caches. */
export enum CacheStoreResult {
/** We were unable to calculate a hash for the key. */
NoHash = "no-hash",
/** There is nothing to store in the cache. */
Empty = "empty",
/** There already exists a cache with the key we are trying to store. */
Duplicate = "duplicate",
/** The cache was stored successfully. */
Stored = "stored",
}
/** Represents results of trying to upload a dependency cache for a language. */
export interface DependencyCacheUploadStatus {
language: Language;
result: CacheStoreResult;
upload_size_bytes?: number;
upload_duration_ms?: number;
}
/** An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheUploadStatusReport = DependencyCacheUploadStatus[];
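An illustrative upload report for the same hypothetical two-language analysis (values made up):

const exampleUploadReport: DependencyCacheUploadStatusReport = [
  // The Java cache was stored successfully.
  {
    language: KnownLanguage.java,
    result: CacheStoreResult.Stored,
    upload_size_bytes: 52428800,
    upload_duration_ms: 9300,
  },
  // Another job already reserved this cache key, so nothing was uploaded for C#.
  { language: KnownLanguage.csharp, result: CacheStoreResult.Duplicate },
];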
/**
* Attempts to store caches for the languages that were analyzed.
*
* @param config The configuration for this workflow.
* @param logger A logger to record some informational messages to.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
*
* @returns An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration.
*/
export async function uploadDependencyCaches(
config: Config,
logger: Logger,
minimizeJavaJars: boolean,
): Promise<void> {
): Promise<DependencyCacheUploadStatusReport> {
const status: DependencyCacheUploadStatusReport = [];
for (const language of config.languages) {
const cacheConfig = getDefaultCacheConfig()[language];
@@ -175,6 +230,7 @@ export async function uploadDependencyCaches(
const globber = await makeGlobber(cacheConfig.hash);
if ((await globber.glob()).length === 0) {
status.push({ language, result: CacheStoreResult.NoHash });
logger.info(
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
@@ -195,6 +251,7 @@ export async function uploadDependencyCaches(
// Skip uploading an empty cache.
if (size === 0) {
status.push({ language, result: CacheStoreResult.Empty });
logger.info(
`Skipping upload of dependency cache for ${language} since it is empty.`,
);
@@ -208,7 +265,16 @@ export async function uploadDependencyCaches(
);
try {
const start = performance.now();
await actionsCache.saveCache(cacheConfig.paths, key);
const upload_duration_ms = Math.round(performance.now() - start);
status.push({
language,
result: CacheStoreResult.Stored,
upload_size_bytes: Math.round(size),
upload_duration_ms,
});
} catch (error) {
// `ReserveCacheError` indicates that the cache key is already in use, which means that a
// cache with that key already exists or is in the process of being uploaded by another
@@ -218,12 +284,16 @@ export async function uploadDependencyCaches(
`Not uploading cache for ${language}, because ${key} is already in use.`,
);
logger.debug(error.message);
status.push({ language, result: CacheStoreResult.Duplicate });
} else {
// Propagate other errors upwards.
throw error;
}
}
}
return status;
}
/**
@@ -270,3 +340,34 @@ async function cachePrefix(
return `${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
}
/** Represents information about our overall cache usage for CodeQL dependency caches. */
export interface DependencyCachingUsageReport {
count: number;
size_bytes: number;
}
/**
 * Tries to determine the overall cache usage for CodeQL dependency caches.
*
* @param logger The logger to log errors to.
 * @returns The overall cache usage for CodeQL dependency caches, or `undefined` if we couldn't determine it.
*/
export async function getDependencyCacheUsage(
logger: Logger,
): Promise<DependencyCachingUsageReport | undefined> {
try {
const caches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);
const totalSize = caches.reduce(
(acc, cache) => acc + (cache.size_in_bytes ?? 0),
0,
);
return { count: caches.length, size_bytes: totalSize };
} catch (err) {
logger.warning(
`Unable to retrieve information about dependency cache usage: ${getErrorMessage(err)}`,
);
}
return undefined;
}
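The resulting report is attached to the init-post status report as `dependency_caching_usage`; for example (illustrative values):

// Illustrative only: what getDependencyCacheUsage might return for a repository
// holding three CodeQL dependency caches totalling roughly 700 MB.
const exampleUsage: DependencyCachingUsageReport = {
  count: 3,
  size_bytes: 734003200,
};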

src/init-action-post-helper.ts

@@ -8,6 +8,7 @@ import { CodeScanning } from "./analyses";
import { getApiClient } from "./api-client";
import { CodeQL, getCodeQL } from "./codeql";
import { Config } from "./config-utils";
import * as dependencyCaching from "./dependency-caching";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
@@ -45,6 +46,10 @@ export interface JobStatusReport {
job_status: JobStatus;
}
export interface DependencyCachingUsageReport {
dependency_caching_usage?: dependencyCaching.DependencyCachingUsageReport;
}
function createFailedUploadFailedSarifResult(
error: unknown,
): UploadFailedSarifResult {

src/init-action-post.ts

@@ -12,10 +12,16 @@ import {
printDebugLogs,
} from "./actions-util";
import { getGitHubVersion } from "./api-client";
import { CachingKind } from "./caching-utils";
import { getCodeQL } from "./codeql";
import { Config, getConfig } from "./config-utils";
import * as debugArtifacts from "./debug-artifacts";
import {
DependencyCachingUsageReport,
getDependencyCacheUsage,
} from "./dependency-caching";
import { Features } from "./feature-flags";
import * as gitUtils from "./git-utils";
import * as initActionPostHelper from "./init-action-post-helper";
import { getActionsLogger } from "./logging";
import { getRepositoryNwo } from "./repository";
@@ -32,7 +38,8 @@ import { checkDiskUsage, checkGitHubVersionInRange, wrapError } from "./util";
interface InitPostStatusReport
extends StatusReportBase,
initActionPostHelper.UploadFailedSarifResult,
initActionPostHelper.JobStatusReport {}
initActionPostHelper.JobStatusReport,
initActionPostHelper.DependencyCachingUsageReport {}
async function runWrapper() {
const logger = getActionsLogger();
@@ -41,6 +48,7 @@ async function runWrapper() {
let uploadFailedSarifResult:
| initActionPostHelper.UploadFailedSarifResult
| undefined;
let dependencyCachingUsage: DependencyCachingUsageReport | undefined;
try {
// Restore inputs from `init` Action.
restoreInputs();
@@ -73,6 +81,17 @@ async function runWrapper() {
features,
logger,
);
// If we are analysing the default branch and some kind of caching is enabled,
// then try to determine our overall cache usage for dependency caches. We only
// do this under these circumstances to avoid slowing down analyses for PRs
// and where caching may not be enabled.
if (
(await gitUtils.isAnalyzingDefaultBranch()) &&
config.dependencyCachingEnabled !== CachingKind.None
) {
dependencyCachingUsage = await getDependencyCacheUsage(logger);
}
}
} catch (unwrappedError) {
const error = wrapError(unwrappedError);
@@ -109,6 +128,7 @@ async function runWrapper() {
...statusReportBase,
...uploadFailedSarifResult,
job_status: initActionPostHelper.getFinalJobStatus(),
dependency_caching_usage: dependencyCachingUsage,
};
logger.info("Sending status report for init-post step.");
await sendStatusReport(statusReport);

src/init-action.ts

@@ -23,7 +23,10 @@ import {
} from "./caching-utils";
import { CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { downloadDependencyCaches } from "./dependency-caching";
import {
DependencyCacheRestoreStatusReport,
downloadDependencyCaches,
} from "./dependency-caching";
import {
addDiagnostic,
flushDiagnostics,
@@ -102,6 +105,7 @@ async function sendCompletedStatusReport(
toolsSource: ToolsSource,
toolsVersion: string,
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
logger: Logger,
error?: Error,
) {
@@ -151,6 +155,7 @@ async function sendCompletedStatusReport(
await getTotalCacheSize(Object.values(config.trapCaches), logger),
),
overlayBaseDatabaseStats,
dependencyCachingResults,
);
await sendStatusReport({
...initWithConfigStatusReport,
@@ -351,6 +356,7 @@ async function run() {
}
let overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined;
let dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined;
try {
if (
config.overlayDatabaseMode === OverlayDatabaseMode.Overlay &&
@@ -562,7 +568,7 @@ async function run() {
codeql,
);
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
await downloadDependencyCaches(
dependencyCachingResults = await downloadDependencyCaches(
config.languages,
logger,
minimizeJavaJars,
@@ -714,6 +720,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger,
error,
);
@@ -736,6 +743,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger,
);
}

src/status-report.test.ts

@@ -286,6 +286,7 @@ const testCreateInitWithConfigStatusReport = test.macro({
undefined,
1024,
undefined,
undefined,
);
if (t.truthy(initWithConfigStatusReport)) {

src/status-report.ts

@@ -13,6 +13,7 @@ import {
} from "./actions-util";
import { getAnalysisKey, getApiClient } from "./api-client";
import { parseRegistriesWithoutCredentials, type Config } from "./config-utils";
import { DependencyCacheRestoreStatusReport } from "./dependency-caching";
import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import { getRef } from "./git-utils";
@@ -497,6 +498,8 @@ export interface InitWithConfigStatusReport extends InitStatusReport {
overlay_base_database_download_size_bytes?: number;
/** Time taken to download the overlay-base database, in milliseconds. */
overlay_base_database_download_duration_ms?: number;
/** Stringified JSON object representing information about the results of restoring dependency caches. */
dependency_caching_restore_results?: DependencyCacheRestoreStatusReport;
/** Stringified JSON array of registry configuration objects, from the 'registries' config field
or workflow input. **/
registries: string;
@@ -522,6 +525,7 @@ export async function createInitWithConfigStatusReport(
configFile: string | undefined,
totalCacheSize: number,
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
): Promise<InitWithConfigStatusReport> {
const languages = config.languages.join(",");
const paths = (config.originalUserInput.paths || []).join(",");
@@ -570,6 +574,7 @@ export async function createInitWithConfigStatusReport(
overlayBaseDatabaseStats?.databaseSizeBytes,
overlay_base_database_download_duration_ms:
overlayBaseDatabaseStats?.databaseDownloadDurationMs,
dependency_caching_restore_results: dependencyCachingResults,
query_filters: JSON.stringify(
config.originalUserInput["query-filters"] ?? [],
),