Compare commits


53 Commits

Author SHA1 Message Date
Henry Mercer
f67ec12472 Merge pull request #3370 from github/copilot/update-overlay-git-version-check
Add git version check for overlay analysis enablement
2025-12-18 15:47:40 +00:00
Henry Mercer
3b6fef64d5 Fix import order 2025-12-18 14:39:01 +00:00
Henry Mercer
8b428c0d4c Use EnvVar 2025-12-18 14:34:50 +00:00
Henry Mercer
034401b281 Merge branch 'main' into copilot/update-overlay-git-version-check 2025-12-18 14:06:01 +00:00
Henry Mercer
95246ce019 Prefer explicit env var to binary accessibility check 2025-12-18 14:05:12 +00:00
Henry Mercer
525b64847a Merge pull request #3374 from github/henrymercer/scan-debug-artifacts
CI: Perform a best-effort scan of the debug artifacts during release validation
2025-12-18 14:00:25 +00:00
Henry Mercer
a7e88a44f8 Only enable overlay for the code scanning suite 2025-12-18 13:06:44 +00:00
Henry Mercer
ff84c6f23c Improve comment 2025-12-18 13:03:52 +00:00
Henry Mercer
948c7fbf11 Test mode: Tolerate missing git binary 2025-12-18 13:01:00 +00:00
Henry Mercer
cec3cc5782 Trim git version output 2025-12-18 12:52:30 +00:00
Henry Mercer
358a55e232 Throw in test mode if can't compute git version 2025-12-18 12:52:13 +00:00
Henry Mercer
eb823a7a97 Merge pull request #3375 from github/henrymercer/overlay-upload-tools-feature
Require tools feature for uploading overlay DBs
2025-12-18 12:43:26 +00:00
Henry Mercer
003ddaeef5 Avoid non-determinism in PR checks due to overlay FFs 2025-12-18 12:35:06 +00:00
Henry Mercer
a2c3c8e3e2 Bump log level for failing to parse git version 2025-12-17 17:28:13 +00:00
Henry Mercer
a13b404670 Record both truncated and full git versions 2025-12-17 17:27:14 +00:00
Henry Mercer
a2917b0733 Check !== undefined rather than truthiness 2025-12-17 16:27:36 +00:00
Henry Mercer
67e683bd1b Report bundled DB size in error if known 2025-12-17 16:02:55 +00:00
Henry Mercer
cb26a026e5 Require tools feature for uploading overlay DBs 2025-12-17 16:02:26 +00:00
Henry Mercer
ac6c41b910 Extract zstd files too 2025-12-17 15:34:12 +00:00
Henry Mercer
056581e05b Update makeTelemetryDiagnostic doc 2025-12-17 12:15:37 +00:00
Henry Mercer
9c5588d006 Remove unnecessary stub restores 2025-12-17 12:12:04 +00:00
Henry Mercer
3765106c90 Move git version logging to config utils 2025-12-17 12:06:41 +00:00
Henry Mercer
e052dbd57d Remove caching mechanism 2025-12-17 11:56:23 +00:00
Henry Mercer
7673a2de65 Run testing Action using Node 24 2025-12-17 11:51:34 +00:00
Henry Mercer
32795b3c52 Merge branch 'main' into copilot/update-overlay-git-version-check 2025-12-17 11:49:32 +00:00
Henry Mercer
6b5763e5ee Skip slow test on Windows 2025-12-17 11:47:39 +00:00
Henry Mercer
3322491022 Bump timeout on Windows 2025-12-17 11:41:55 +00:00
Henry Mercer
6bc6217487 Merge branch 'main' into henrymercer/scan-debug-artifacts 2025-12-17 11:36:38 +00:00
Henry Mercer
faf6d35e7b Verify using post step 2025-12-17 11:35:26 +00:00
Henry Mercer
3b94cfeb15 Avoid logging each extract call 2025-12-17 11:35:26 +00:00
Henry Mercer
b88acb2f6c Merge pull request #3359 from github/dependabot/npm_and_yarn/npm-minor-b2e0062778
Bump the npm-minor group with 3 updates
2025-12-17 11:04:55 +00:00
Henry Mercer
241948c698 Merge branch 'main' into dependabot/npm_and_yarn/npm-minor-b2e0062778 2025-12-17 10:38:55 +00:00
Henry Mercer
da77f9f638 Suppress debug logs for artifact scanner test 2025-12-17 10:25:48 +00:00
Henry Mercer
de172624a1 Slim down test debug artifacts 2025-12-17 10:25:48 +00:00
Henry Mercer
488c1f1959 Add regression test for artifact scanner 2025-12-17 10:25:48 +00:00
Henry Mercer
f2ccf3b4f1 Ensure .gz files are extracted too 2025-12-17 10:25:47 +00:00
Henry Mercer
f28848a66a Use artifact scanner in debug artifacts PR checks 2025-12-17 10:25:47 +00:00
Henry Mercer
5459b98ca0 Add simple artifact scanner for tests only 2025-12-17 10:25:46 +00:00
Henry Mercer
0c8bfeaf84 Add artifact scanner 2025-12-17 10:25:46 +00:00
Henry Mercer
1fe89fe9cb Merge pull request #3368 from github/copilot/bump-actions-npm-packages
Bump @actions/* npm packages to latest versions
2025-12-17 09:59:27 +00:00
Henry Mercer
6dba00881c Merge pull request #3372 from github/mergeback/v4.31.9-to-main-5d4e8d1a
Mergeback v4.31.9 refs/heads/releases/v4 into main
2025-12-16 19:33:12 +00:00
github-actions[bot]
d4d47c0d3d Rebuild 2025-12-16 18:56:12 +00:00
github-actions[bot]
6c6e810910 Update changelog and version after v4.31.9 2025-12-16 18:32:18 +00:00
copilot-swe-agent[bot]
393c074965 Refactor existing telemetry diagnostics to use makeTelemetryDiagnostic
Refactored bundle-download-telemetry and zstd-availability diagnostics
in init-action.ts to use the new makeTelemetryDiagnostic helper function.
Also added guard for empty languages array in logGitVersionTelemetry.

Co-authored-by: henrymercer <14129055+henrymercer@users.noreply.github.com>
2025-12-16 17:24:57 +00:00
copilot-swe-agent[bot]
c3dc529aef Address feedback: cache git version, improve error handling, add telemetry
- Cache the git version to avoid recomputing on repeated calls
- Refactor getGitVersion to getGitVersionOrThrow with detailed errors
- Add getGitVersion that logs errors and handles caching
- Add makeTelemetryDiagnostic helper to diagnostics.ts
- Add logGitVersionTelemetry function to log git version telemetry
- Call logGitVersionTelemetry in init-action.ts
- Add resetCachedGitVersion for testing
- Update tests to work with new function signatures and caching

Co-authored-by: henrymercer <14129055+henrymercer@users.noreply.github.com>
2025-12-16 17:19:46 +00:00
copilot-swe-agent[bot]
fc2bbb041e Address code review feedback
- Add test for Windows-style git version format
- Add comment clarifying regex extracts major.minor.patch
- Replace dynamic import with static import for semver

Co-authored-by: henrymercer <14129055+henrymercer@users.noreply.github.com>
2025-12-16 16:27:41 +00:00
copilot-swe-agent[bot]
89753aa84b Add git version check for overlay analysis enablement
Overlay analysis depends on `getFileOidsUnderPath`, which uses the
`git ls-files --format` option introduced in Git 2.38.0. This change
adds a check for the Git version before enabling overlay analysis.

Co-authored-by: henrymercer <14129055+henrymercer@users.noreply.github.com>
2025-12-16 16:22:23 +00:00
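For illustration only, the check this commit describes boils down to parsing the output of `git --version` and comparing its semver-compatible portion against 2.38.0. The sketch below is not the action's actual code (that lives in the src/git-utils.ts and src/config-utils.ts diffs further down); the helper name and the direct use of child_process are assumptions made for a standalone example.

import { execFileSync } from "child_process";
import * as semver from "semver";

const GIT_MINIMUM_VERSION_FOR_OVERLAY = "2.38.0";

// True if the installed git supports `git ls-files --format`, i.e. is at least 2.38.0.
// On parse failure we conservatively return false, mirroring the action's fallback
// to building a normal full database.
function gitSupportsOverlay(): boolean {
  // Output looks like "git version 2.40.0" or "git version 2.40.0.windows.1".
  const stdout = execFileSync("git", ["--version"], { encoding: "utf8" });
  const match = stdout.trim().match(/^git version ((\d+\.\d+\.\d+).*)$/);
  const truncatedVersion = match?.[2];
  if (truncatedVersion === undefined) {
    return false;
  }
  // Compare only the major.minor.patch portion, which is semver-compatible.
  return semver.gte(truncatedVersion, GIT_MINIMUM_VERSION_FOR_OVERLAY);
}

console.log(`git supports overlay analysis: ${gitSupportsOverlay()}`);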
copilot-swe-agent[bot]
aff7998c4a Initial plan 2025-12-16 16:09:09 +00:00
Henry Mercer
7a5748cf0d Remove changelog note 2025-12-16 13:41:13 +00:00
copilot-swe-agent[bot]
db75d46248 Bump @actions/* npm packages to latest versions
Co-authored-by: henrymercer <14129055+henrymercer@users.noreply.github.com>
2025-12-16 13:34:51 +00:00
copilot-swe-agent[bot]
a0fc644617 Initial plan 2025-12-16 13:29:18 +00:00
github-actions[bot]
e1058e4d74 Rebuild 2025-12-15 17:03:33 +00:00
dependabot[bot]
d4f39b0766 Bump the npm-minor group with 3 updates
Bumps the npm-minor group with 3 updates: [@eslint/js](https://github.com/eslint/eslint/tree/HEAD/packages/js), [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) and [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser).


Updates `@eslint/js` from 9.39.1 to 9.39.2
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/commits/v9.39.2/packages/js)

Updates `@typescript-eslint/eslint-plugin` from 8.48.1 to 8.49.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/eslint-plugin/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.49.0/packages/eslint-plugin)

Updates `@typescript-eslint/parser` from 8.48.1 to 8.49.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.49.0/packages/parser)

---
updated-dependencies:
- dependency-name: "@eslint/js"
  dependency-version: 9.39.2
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: npm-minor
- dependency-name: "@typescript-eslint/eslint-plugin"
  dependency-version: 8.49.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: npm-minor
- dependency-name: "@typescript-eslint/parser"
  dependency-version: 8.49.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: npm-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-15 17:01:55 +00:00
38 changed files with 274901 additions and 192035 deletions

View File

@@ -0,0 +1,6 @@
name: Verify that the best-effort debug artifact scan completed
description: Verifies that the best-effort debug artifact scan completed successfully during tests
runs:
using: node24
main: index.js
post: post.js

View File

@@ -0,0 +1,2 @@
// The main step is a no-op, since we can only verify artifact scan completion in the post step.
console.log("Will verify artifact scan completion in the post step.");

View File

@@ -0,0 +1,11 @@
// Post step - runs after the workflow completes, when artifact scan has finished
const process = require("process");
const scanFinished = process.env.CODEQL_ACTION_ARTIFACT_SCAN_FINISHED;
if (scanFinished !== "true") {
console.error("Error: Best-effort artifact scan did not complete. Expected CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true");
process.exit(1);
}
console.log("✓ Best-effort artifact scan completed successfully");

View File

@@ -76,6 +76,7 @@ jobs:
       - uses: ./../action/analyze
         env:
           https_proxy: http://squid-proxy:3128
+          CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
           CODEQL_ACTION_TEST_MODE: true
     container:
       image: ubuntu:22.04

View File

@@ -6,6 +6,11 @@ env:
   # Diff informed queries add an additional query filter which is not yet
   # taken into account by these tests.
   CODEQL_ACTION_DIFF_INFORMED_QUERIES: false
+  # Specify overlay enablement manually to ensure stability around the exclude-from-incremental
+  # query filter. Here we only enable for the default code scanning suite.
+  CODEQL_ACTION_OVERLAY_ANALYSIS: true
+  CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
+  CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true
 on:
   push:

View File

@@ -58,6 +58,8 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
           dotnet-version: '9.x'
+      - name: Assert best-effort artifact scan completed
+        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -54,6 +54,8 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
           dotnet-version: '9.x'
+      - name: Assert best-effort artifact scan completed
+        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         id: init
         with:

View File

@@ -2,6 +2,10 @@
 See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
+## [UNRELEASED]
+No user facing changes.
 ## 4.31.9 - 16 Dec 2025
 No user facing changes.

43325  lib/analyze-action-post.js (generated): file diff suppressed because it is too large
37495  lib/analyze-action.js (generated): file diff suppressed because it is too large
36154  lib/autobuild-action.js (generated): file diff suppressed because it is too large
43454  lib/init-action-post.js (generated): file diff suppressed because it is too large
37601  lib/init-action.js (generated): file diff suppressed because it is too large
File diff suppressed because it is too large
36146  lib/setup-codeql-action.js (generated): file diff suppressed because it is too large
File diff suppressed because it is too large
36481  lib/start-proxy-action.js (generated): file diff suppressed because it is too large
36190  lib/upload-lib.js (generated): file diff suppressed because it is too large
File diff suppressed because it is too large
36164  lib/upload-sarif-action.js (generated): file diff suppressed because it is too large
984  package-lock.json (generated): file diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "4.31.9",
+  "version": "4.31.10",
   "private": true,
   "description": "CodeQL action",
   "scripts": {
@@ -24,12 +24,12 @@
   },
   "license": "MIT",
   "dependencies": {
-    "@actions/artifact": "^4.0.0",
+    "@actions/artifact": "^5.0.1",
     "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
-    "@actions/cache": "^4.1.0",
+    "@actions/cache": "^5.0.1",
-    "@actions/core": "^1.11.1",
+    "@actions/core": "^2.0.1",
-    "@actions/exec": "^1.1.1",
+    "@actions/exec": "^2.0.0",
-    "@actions/github": "^6.0.0",
+    "@actions/github": "^6.0.1",
     "@actions/glob": "^0.5.0",
     "@actions/http-client": "^3.0.0",
     "@actions/io": "^2.0.0",
@@ -51,7 +51,7 @@
     "@ava/typescript": "6.0.0",
     "@eslint/compat": "^2.0.0",
     "@eslint/eslintrc": "^3.3.3",
-    "@eslint/js": "^9.39.1",
+    "@eslint/js": "^9.39.2",
     "@microsoft/eslint-formatter-sarif": "^3.1.0",
     "@octokit/types": "^16.0.0",
     "@types/archiver": "^7.0.0",
@@ -61,7 +61,7 @@
     "@types/node-forge": "^1.3.14",
     "@types/semver": "^7.7.1",
     "@types/sinon": "^21.0.0",
-    "@typescript-eslint/eslint-plugin": "^8.48.1",
+    "@typescript-eslint/eslint-plugin": "^8.49.0",
     "@typescript-eslint/parser": "^8.48.0",
     "ava": "^6.4.1",
     "esbuild": "^0.27.1",

View File

@@ -23,6 +23,7 @@ services:
         - 3128:3128
     env:
       https_proxy: http://squid-proxy:3128
+      CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
     steps:
       - uses: ./../action/init
         with:

View File

@@ -0,0 +1,98 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import test from "ava";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";
import { getRecordingLogger, LoggedMessage } from "./testing-utils";
test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file with a fake GitHub token
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file with token ghp_1234567890123456789012345678901234AB",
);
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
t.regex(
error?.message || "",
/Found 1 potential GitHub token.*Personal Access Token/,
);
t.regex(error?.message || "", /test\.txt/);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
test("scanArtifactsForTokens handles files without tokens", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file without tokens
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file without any sensitive data",
);
await t.notThrowsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
if (os.platform() !== "win32") {
test("scanArtifactsForTokens finds token in debug artifacts", async (t) => {
t.timeout(15000); // 15 seconds
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages, { logToConsole: false });
// The zip here is a regression test based on
// https://github.com/github/codeql-action/security/advisories/GHSA-vqf5-2xx6-9wfm
const testZip = path.join(
__dirname,
"..",
"src",
"testdata",
"debug-artifacts-with-fake-token.zip",
);
// This zip file contains a nested structure with a fake token in:
// my-db-java-partial.zip/trap/java/invocations/kotlin.9017231652989744319.trap
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testZip], logger),
);
t.regex(
error?.message || "",
/Found.*potential GitHub token/,
"Should detect token in nested zip",
);
t.regex(
error?.message || "",
/kotlin\.9017231652989744319\.trap/,
"Should report the .trap file containing the token",
);
const logOutput = messages.map((msg) => msg.message).join("\n");
t.regex(
logOutput,
/^Extracting gz file: .*\.gz$/m,
"Logs should show that .gz files were extracted",
);
});
}

379  src/artifact-scanner.ts (new file)
View File

@@ -0,0 +1,379 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as exec from "@actions/exec";
import { Logger } from "./logging";
import { getErrorMessage } from "./util";
/**
* GitHub token patterns to scan for.
* These patterns match various GitHub token formats.
*/
const GITHUB_TOKEN_PATTERNS = [
{
name: "Personal Access Token",
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
},
{
name: "OAuth Access Token",
pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
},
{
name: "User-to-Server Token",
pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Server-to-Server Token",
pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Refresh Token",
pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
},
{
name: "App Installation Access Token",
pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
},
];
interface TokenFinding {
tokenType: string;
filePath: string;
}
interface ScanResult {
scannedFiles: number;
findings: TokenFinding[];
}
/**
* Scans a file for GitHub tokens.
*
* @param filePath Path to the file to scan
* @param relativePath Relative path for display purposes
* @param logger Logger instance
* @returns Array of token findings in the file
*/
function scanFileForTokens(
filePath: string,
relativePath: string,
logger: Logger,
): TokenFinding[] {
const findings: TokenFinding[] = [];
try {
const content = fs.readFileSync(filePath, "utf8");
for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
const matches = content.match(pattern);
if (matches) {
for (let i = 0; i < matches.length; i++) {
findings.push({ tokenType: name, filePath: relativePath });
}
logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
}
}
return findings;
} catch (e) {
// If we can't read the file as text, it's likely binary or inaccessible
logger.debug(
`Could not scan file ${filePath} for tokens: ${getErrorMessage(e)}`,
);
return [];
}
}
/**
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
*
* @param archivePath Path to the archive file
* @param relativeArchivePath Relative path of the archive for display
* @param extractDir Directory to extract to
* @param logger Logger instance
* @param depth Current recursion depth (to prevent infinite loops)
* @returns Scan results
*/
async function scanArchiveFile(
archivePath: string,
relativeArchivePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const MAX_DEPTH = 10; // Prevent infinite recursion
if (depth > MAX_DEPTH) {
throw new Error(
`Maximum archive extraction depth (${MAX_DEPTH}) reached for ${archivePath}`,
);
}
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
try {
const tempExtractDir = fs.mkdtempSync(
path.join(extractDir, `extract-${depth}-`),
);
// Determine archive type and extract accordingly
const fileName = path.basename(archivePath).toLowerCase();
if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
// Extract tar.gz files
logger.debug(`Extracting tar.gz file: ${archivePath}`);
await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], {
silent: true,
});
} else if (fileName.endsWith(".tar.zst")) {
// Extract tar.zst files
logger.debug(`Extracting tar.zst file: ${archivePath}`);
await exec.exec(
"tar",
["--zstd", "-xf", archivePath, "-C", tempExtractDir],
{
silent: true,
},
);
} else if (fileName.endsWith(".zst")) {
// Extract .zst files (single file compression)
logger.debug(`Extracting zst file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".zst"),
);
await exec.exec("zstd", ["-d", archivePath, "-o", outputFile], {
silent: true,
});
} else if (fileName.endsWith(".gz")) {
// Extract .gz files (single file compression)
logger.debug(`Extracting gz file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".gz"),
);
await exec.exec("gunzip", ["-c", archivePath], {
outStream: fs.createWriteStream(outputFile),
silent: true,
});
} else if (fileName.endsWith(".zip")) {
// Extract zip files
logger.debug(`Extracting zip file: ${archivePath}`);
await exec.exec(
"unzip",
["-q", "-o", archivePath, "-d", tempExtractDir],
{
silent: true,
},
);
}
// Scan the extracted contents
const scanResult = await scanDirectory(
tempExtractDir,
relativeArchivePath,
logger,
depth + 1,
);
result.scannedFiles += scanResult.scannedFiles;
result.findings.push(...scanResult.findings);
// Clean up extracted files
fs.rmSync(tempExtractDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}`,
);
}
return result;
}
/**
* Scans a single file, including recursive archive extraction if applicable.
*
* @param fullPath Full path to the file
* @param relativePath Relative path for display
* @param extractDir Directory to use for extraction (for archive files)
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanFile(
fullPath: string,
relativePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 1,
findings: [],
};
// Check if it's an archive file and recursively scan it
const fileName = path.basename(fullPath).toLowerCase();
const isArchive =
fileName.endsWith(".zip") ||
fileName.endsWith(".tar.gz") ||
fileName.endsWith(".tgz") ||
fileName.endsWith(".tar.zst") ||
fileName.endsWith(".zst") ||
fileName.endsWith(".gz");
if (isArchive) {
const archiveResult = await scanArchiveFile(
fullPath,
relativePath,
extractDir,
logger,
depth,
);
result.scannedFiles += archiveResult.scannedFiles;
result.findings.push(...archiveResult.findings);
}
// Scan the file itself for tokens (unless it's a pure binary archive format)
const fileFindings = scanFileForTokens(fullPath, relativePath, logger);
result.findings.push(...fileFindings);
return result;
}
/**
* Recursively scans a directory for GitHub tokens.
*
* @param dirPath Directory path to scan
* @param baseRelativePath Base relative path for computing display paths
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanDirectory(
dirPath: string,
baseRelativePath: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.join(baseRelativePath, entry.name);
if (entry.isDirectory()) {
const subResult = await scanDirectory(
fullPath,
relativePath,
logger,
depth,
);
result.scannedFiles += subResult.scannedFiles;
result.findings.push(...subResult.findings);
} else if (entry.isFile()) {
const fileResult = await scanFile(
fullPath,
relativePath,
path.dirname(fullPath),
logger,
depth,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
return result;
}
/**
* Scans a list of files and directories for GitHub tokens.
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
*
* @param filesToScan List of file paths to scan
* @param logger Logger instance
* @returns Scan results
*/
export async function scanArtifactsForTokens(
filesToScan: string[],
logger: Logger,
): Promise<void> {
logger.info(
"Starting best-effort check for potential GitHub tokens in debug artifacts (for testing purposes only)...",
);
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
// Create a temporary directory for extraction
const tempScanDir = fs.mkdtempSync(path.join(os.tmpdir(), "artifact-scan-"));
try {
for (const filePath of filesToScan) {
const stats = fs.statSync(filePath);
const fileName = path.basename(filePath);
if (stats.isDirectory()) {
const dirResult = await scanDirectory(filePath, fileName, logger);
result.scannedFiles += dirResult.scannedFiles;
result.findings.push(...dirResult.findings);
} else if (stats.isFile()) {
const fileResult = await scanFile(
filePath,
fileName,
tempScanDir,
logger,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
// Compute statistics from findings
const tokenTypesCounts = new Map<string, number>();
const filesWithTokens = new Set<string>();
for (const finding of result.findings) {
tokenTypesCounts.set(
finding.tokenType,
(tokenTypesCounts.get(finding.tokenType) || 0) + 1,
);
filesWithTokens.add(finding.filePath);
}
const tokenTypesSummary = Array.from(tokenTypesCounts.entries())
.map(([type, count]) => `${count} ${type}${count > 1 ? "s" : ""}`)
.join(", ");
const baseSummary = `scanned ${result.scannedFiles} files, found ${result.findings.length} potential token(s) in ${filesWithTokens.size} file(s)`;
const summaryWithTypes = tokenTypesSummary
? `${baseSummary} (${tokenTypesSummary})`
: baseSummary;
logger.info(`Artifact check complete: ${summaryWithTypes}`);
if (result.findings.length > 0) {
const fileList = Array.from(filesWithTokens).join(", ");
throw new Error(
`Found ${result.findings.length} potential GitHub token(s) (${tokenTypesSummary}) in debug artifacts at: ${fileList}. This is a best-effort check for testing purposes only.`,
);
}
} finally {
// Clean up temporary directory
try {
fs.rmSync(tempScanDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not clean up temporary scan directory: ${getErrorMessage(e)}`,
);
}
}
}
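A small usage sketch for the scanner added above: scanArtifactsForTokens throws if any potential token is found, so a resolved promise means the scan came back clean. The wrapper function and artifact paths below are hypothetical; in the action the paths come from the list of debug artifacts about to be uploaded (see the debug-artifacts.ts diff further down).

import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";

// Hypothetical artifact paths for illustration.
const toUpload = ["debug-artifacts/my-db-java.zip", "debug-artifacts/build.log.gz"];

async function checkDebugArtifacts(): Promise<void> {
  const logger = getRunnerLogger(true);
  try {
    await scanArtifactsForTokens(toUpload, logger);
    logger.info("No potential GitHub tokens found in debug artifacts.");
  } catch (e) {
    // The error message lists the token types and the files they were found in.
    logger.error(e instanceof Error ? e : String(e));
    throw e;
  }
}

void checkDebugArtifacts();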

View File

@@ -15,6 +15,7 @@ import * as configUtils from "./config-utils";
 import * as errorMessages from "./error-messages";
 import { Feature } from "./feature-flags";
 import * as gitUtils from "./git-utils";
+import { GitVersionInfo } from "./git-utils";
 import { KnownLanguage, Language } from "./languages";
 import { getRunnerLogger } from "./logging";
 import {
@@ -978,6 +979,7 @@ interface OverlayDatabaseModeTestSetup {
   languages: Language[];
   codeqlVersion: string;
   gitRoot: string | undefined;
+  gitVersion: GitVersionInfo | undefined;
   codeScanningConfig: configUtils.UserConfig;
   diskUsage: DiskUsage | undefined;
   memoryFlagValue: number;
@@ -992,6 +994,10 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
   languages: [KnownLanguage.javascript],
   codeqlVersion: CODEQL_OVERLAY_MINIMUM_VERSION,
   gitRoot: "/some/git/root",
+  gitVersion: new GitVersionInfo(
+    gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
+    gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
+  ),
   codeScanningConfig: {},
   diskUsage: {
     numAvailableBytes: 50_000_000_000,
@@ -1070,6 +1076,7 @@ const getOverlayDatabaseModeMacro = test.macro({
         setup.buildMode,
         undefined,
         setup.codeScanningConfig,
+        setup.gitVersion,
         logger,
       );
@@ -1773,6 +1780,32 @@ test(
   },
 );
test(
getOverlayDatabaseModeMacro,
"Fallback due to old git version",
{
overlayDatabaseEnvVar: "overlay",
gitVersion: new GitVersionInfo("2.30.0", "2.30.0"), // Version below required 2.38.0
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"Fallback when git version cannot be determined",
{
overlayDatabaseEnvVar: "overlay",
gitVersion: undefined,
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
 // Exercise language-specific overlay analysis features code paths
 for (const language in KnownLanguage) {
   test(

View File

@@ -22,11 +22,19 @@ import {
   parseUserConfig,
   UserConfig,
 } from "./config/db-config";
+import { addDiagnostic, makeTelemetryDiagnostic } from "./diagnostics";
 import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
+import { EnvVar } from "./environment";
 import * as errorMessages from "./error-messages";
 import { Feature, FeatureEnablement } from "./feature-flags";
 import { RepositoryProperties } from "./feature-flags/properties";
-import { getGitRoot, isAnalyzingDefaultBranch } from "./git-utils";
+import {
+  getGitRoot,
+  getGitVersionOrThrow,
+  GIT_MINIMUM_VERSION_FOR_OVERLAY,
+  GitVersionInfo,
+  isAnalyzingDefaultBranch,
+} from "./git-utils";
 import { KnownLanguage, Language } from "./languages";
 import { Logger } from "./logging";
 import {
@@ -45,6 +53,8 @@ import {
   isDefined,
   checkDiskUsage,
   getCodeQLMemoryLimit,
+  getErrorMessage,
+  isInTestMode,
 } from "./util";
 export * from "./config/db-config";
@@ -709,6 +719,7 @@ export async function getOverlayDatabaseMode(
   buildMode: BuildMode | undefined,
   ramInput: string | undefined,
   codeScanningConfig: UserConfig,
+  gitVersion: GitVersionInfo | undefined,
   logger: Logger,
 ): Promise<{
   overlayDatabaseMode: OverlayDatabaseMode;
@@ -811,6 +822,22 @@
     );
     return nonOverlayAnalysis;
   }
if (gitVersion === undefined) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because ` +
"the Git version could not be determined. " +
"Falling back to creating a normal full database instead.",
);
return nonOverlayAnalysis;
}
if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because ` +
`the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. ` +
"Falling back to creating a normal full database instead.",
);
return nonOverlayAnalysis;
}
   return {
     overlayDatabaseMode,
@@ -903,6 +930,24 @@ export async function initConfig(
     config.computedConfig["query-filters"] = [];
   }
let gitVersion: GitVersionInfo | undefined = undefined;
try {
gitVersion = await getGitVersionOrThrow();
logger.info(`Using Git version ${gitVersion.fullVersion}`);
await logGitVersionTelemetry(config, gitVersion);
} catch (e) {
logger.warning(`Could not determine Git version: ${getErrorMessage(e)}`);
// Throw the error in test mode so it's more visible, unless the environment
// variable is set to tolerate this, for example because we're running in a
// Docker container where git may not be available.
if (
isInTestMode() &&
process.env[EnvVar.TOLERATE_MISSING_GIT_VERSION] !== "true"
) {
throw e;
}
}
   // The choice of overlay database mode depends on the selection of languages
   // and queries, which in turn depends on the user config and the augmentation
   // properties. So we need to calculate the overlay database mode after the
@@ -916,6 +961,7 @@
     config.buildMode,
     inputs.ramInput,
     config.computedConfig,
+    gitVersion,
     logger,
   );
   logger.info(
@@ -1316,3 +1362,26 @@ export function getPrimaryAnalysisConfig(config: Config): AnalysisConfig {
     ? CodeScanning
     : CodeQuality;
 }
/** Logs the Git version as a telemetry diagnostic. */
async function logGitVersionTelemetry(
config: Config,
gitVersion: GitVersionInfo,
): Promise<void> {
if (config.languages.length > 0) {
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
makeTelemetryDiagnostic(
"codeql-action/git-version-telemetry",
"Git version telemetry",
{
fullVersion: gitVersion.fullVersion,
truncatedVersion: gitVersion.truncatedVersion,
},
),
);
}
}

View File

@@ -95,13 +95,14 @@ export async function cleanupAndUploadDatabases(
   const reports: DatabaseUploadResult[] = [];
   for (const language of config.languages) {
+    let bundledDbSize: number | undefined = undefined;
     try {
       // Upload the database bundle.
       // Although we are uploading arbitrary file contents to the API, it's worth
       // noting that it's the API's job to validate that the contents is acceptable.
       // This API method is available to anyone with write access to the repo.
       const bundledDb = await bundleDb(config, language, codeql, language);
-      const bundledDbSize = fs.statSync(bundledDb).size;
+      bundledDbSize = fs.statSync(bundledDb).size;
       const bundledDbReadStream = fs.createReadStream(bundledDb);
       const commitOid = await gitUtils.getCommitOid(
         actionsUtil.getRequiredInput("checkout_path"),
@@ -144,6 +145,9 @@ export async function cleanupAndUploadDatabases(
       reports.push({
         language,
         error: util.getErrorMessage(e),
+        ...(bundledDbSize !== undefined
+          ? { zipped_upload_size_bytes: bundledDbSize }
+          : {}),
       });
     }
   }

View File

@@ -8,6 +8,7 @@ import archiver from "archiver";
 import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
 import { dbIsFinalized } from "./analyze";
+import { scanArtifactsForTokens } from "./artifact-scanner";
 import { type CodeQL } from "./codeql";
 import { Config } from "./config-utils";
 import { EnvVar } from "./environment";
@@ -23,6 +24,7 @@
   getCodeQLDatabasePath,
   getErrorMessage,
   GitHubVariant,
+  isInTestMode,
   listFolder,
 } from "./util";
@@ -269,6 +271,14 @@ export async function uploadDebugArtifacts(
     return "upload-not-supported";
   }
+  // When running in test mode, perform a best effort scan of the debug artifacts. The artifact
+  // scanner is basic and not reliable or fast enough for production use, but it can help catch
+  // some issues early.
+  if (isInTestMode()) {
+    await scanArtifactsForTokens(toUpload, logger);
+    core.exportVariable("CODEQL_ACTION_ARTIFACT_SCAN_FINISHED", "true");
+  }
   let suffix = "";
   const matrix = getOptionalInput("matrix");
   if (matrix) {

View File

@@ -185,3 +185,27 @@ export function flushDiagnostics(config: Config) {
   // Reset the unwritten diagnostics array.
   unwrittenDiagnostics = [];
 }
/**
* Creates a telemetry-only diagnostic message. This is a convenience function
* for creating diagnostics that should only be sent to telemetry and not
* displayed on the status page or CLI summary table.
*
* @param id An identifier under which it makes sense to group this diagnostic message
* @param name Display name
* @param attributes Structured metadata
*/
export function makeTelemetryDiagnostic(
id: string,
name: string,
attributes: { [key: string]: any },
): DiagnosticMessage {
return makeDiagnostic(id, name, {
attributes,
visibility: {
cliSummaryTable: false,
statusPage: false,
telemetry: true,
},
});
}
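A usage sketch for the helper above, mirroring how the init-action.ts and config-utils.ts diffs in this comparison call it. The diagnostic id, name, and attribute payload below are placeholders, not real diagnostics emitted by the action.

import { Config } from "./config-utils";
import { addDiagnostic, makeTelemetryDiagnostic } from "./diagnostics";

function reportExampleTelemetry(config: Config): void {
  addDiagnostic(
    config,
    // Arbitrarily attach the diagnostic to the first analysed language, as the
    // existing callers do.
    config.languages[0],
    makeTelemetryDiagnostic(
      "codeql-action/example-telemetry", // hypothetical diagnostic id
      "Example telemetry",
      { exampleAttribute: true }, // placeholder attributes
    ),
  );
}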

View File

@@ -129,4 +129,10 @@ export enum EnvVar {
    * the workflow is valid and validation is not necessary.
    */
   SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
+
+  /**
+   * Whether to tolerate failure to determine the git version (only applicable in test mode).
+   * Intended for use in environments where git may not be installed, such as Docker containers.
+   */
+  TOLERATE_MISSING_GIT_VERSION = "CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION",
 }

View File

@@ -293,6 +293,7 @@ export const featureConfig: Record<
     defaultValue: false,
     envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API",
     minimumVersion: undefined,
+    toolsFeature: ToolsFeature.BundleSupportsOverlay,
   },
   [Feature.UseRepositoryProperties]: {
     defaultValue: false,

View File

@@ -1,4 +1,5 @@
 import * as fs from "fs";
+import * as os from "os";
 import * as path from "path";
 import * as core from "@actions/core";
@@ -315,27 +316,23 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => {
       "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
     );
-  try {
-    const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+  const result = await gitUtils.getFileOidsUnderPath("/fake/path");
   t.deepEqual(result, {
     "lib/git-utils.js": "30d998ded095371488be3a729eb61d86ed721a18",
     "lib/git-utils.js.map": "d89514599a9a99f22b4085766d40af7b99974827",
     "src/git-utils.ts": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
   });
   t.deepEqual(runGitCommandStub.firstCall.args, [
     "/fake/path",
     ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
     "Cannot list Git OIDs of tracked files.",
   ]);
-  } finally {
-    runGitCommandStub.restore();
-  }
 });
 test("getFileOidsUnderPath handles quoted paths", async (t) => {
-  const runGitCommandStub = sinon
+  sinon
     .stub(gitUtils as any, "runGitCommand")
     .resolves(
       "30d998ded095371488be3a729eb61d86ed721a18_lib/normal-file.js\n" +
@@ -343,34 +340,24 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => {
       'a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_"lib/file\\twith\\ttabs.js"',
     );
-  try {
-    const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+  const result = await gitUtils.getFileOidsUnderPath("/fake/path");
   t.deepEqual(result, {
     "lib/normal-file.js": "30d998ded095371488be3a729eb61d86ed721a18",
     "lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
     "lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
   });
-  } finally {
-    runGitCommandStub.restore();
-  }
 });
 test("getFileOidsUnderPath handles empty output", async (t) => {
-  const runGitCommandStub = sinon
-    .stub(gitUtils as any, "runGitCommand")
-    .resolves("");
-  try {
-    const result = await gitUtils.getFileOidsUnderPath("/fake/path");
-    t.deepEqual(result, {});
-  } finally {
-    runGitCommandStub.restore();
-  }
+  sinon.stub(gitUtils as any, "runGitCommand").resolves("");
+  const result = await gitUtils.getFileOidsUnderPath("/fake/path");
+  t.deepEqual(result, {});
 });
 test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
-  const runGitCommandStub = sinon
+  sinon
     .stub(gitUtils as any, "runGitCommand")
     .resolves(
       "30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
@@ -378,17 +365,73 @@ test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
       "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
     );
-  try {
-    await t.throwsAsync(
-      async () => {
-        await gitUtils.getFileOidsUnderPath("/fake/path");
-      },
-      {
-        instanceOf: Error,
-        message: 'Unexpected "git ls-files" output: invalid-line-format',
-      },
-    );
-  } finally {
-    runGitCommandStub.restore();
-  }
+  await t.throwsAsync(
+    async () => {
+      await gitUtils.getFileOidsUnderPath("/fake/path");
+    },
+    {
+      instanceOf: Error,
+      message: 'Unexpected "git ls-files" output: invalid-line-format',
+    },
+  );
+});
+
+test("getGitVersionOrThrow returns version for valid git output", async (t) => {
+  sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(`git version 2.40.0${os.EOL}`);
const version = await gitUtils.getGitVersionOrThrow();
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0");
});
test("getGitVersionOrThrow throws for invalid git output", async (t) => {
sinon.stub(gitUtils as any, "runGitCommand").resolves("invalid output");
await t.throwsAsync(
async () => {
await gitUtils.getGitVersionOrThrow();
},
{
instanceOf: Error,
message: "Could not parse Git version from output: invalid output",
},
);
});
test("getGitVersionOrThrow handles Windows-style git output", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves("git version 2.40.0.windows.1");
const version = await gitUtils.getGitVersionOrThrow();
// The truncated version should contain just the major.minor.patch portion
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0.windows.1");
});
test("getGitVersionOrThrow throws when git command fails", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.rejects(new Error("git not found"));
await t.throwsAsync(
async () => {
await gitUtils.getGitVersionOrThrow();
},
{
instanceOf: Error,
message: "git not found",
},
);
});
test("GitVersionInfo.isAtLeast correctly compares versions", async (t) => {
const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0");
t.true(version.isAtLeast("2.38.0"));
t.true(version.isAtLeast("2.40.0"));
t.false(version.isAtLeast("2.41.0"));
t.false(version.isAtLeast("3.0.0"));
 });

View File

@@ -1,6 +1,7 @@
 import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as io from "@actions/io";
+import * as semver from "semver";
 import {
   getOptionalInput,
@@ -9,6 +10,52 @@
 } from "./actions-util";
 import { ConfigurationError, getRequiredEnvParam } from "./util";
/**
* Minimum Git version required for overlay analysis. The `git ls-files --format`
* option, which is used by `getFileOidsUnderPath`, was introduced in Git 2.38.0.
*/
export const GIT_MINIMUM_VERSION_FOR_OVERLAY = "2.38.0";
/**
* Git version information
*
* The full version string as reported by `git --version` may not be
* semver-compatible (e.g., "2.40.0.windows.1"). This class captures both
* the full version string and a truncated semver-compatible version string
* (e.g., "2.40.0").
*/
export class GitVersionInfo {
constructor(
/** Truncated semver-compatible version */
public truncatedVersion: string,
/** Full version string as reported by `git --version` */
public fullVersion: string,
) {}
isAtLeast(minVersion: string): boolean {
return semver.gte(this.truncatedVersion, minVersion);
}
}
/**
* Gets the version of Git installed on the system and throws an error if
* the version cannot be determined.
*/
export async function getGitVersionOrThrow(): Promise<GitVersionInfo> {
const stdout = await runGitCommand(
undefined,
["--version"],
"Failed to get git version.",
);
// Git version output can vary: "git version 2.40.0" or "git version 2.40.0.windows.1"
// We capture just the major.minor.patch portion to ensure semver compatibility.
const match = stdout.trim().match(/^git version ((\d+\.\d+\.\d+).*)$/);
if (match?.[1] && match?.[2]) {
return new GitVersionInfo(match[2], match[1]);
}
throw new Error(`Could not parse Git version from output: ${stdout.trim()}`);
}
 export const runGitCommand = async function (
   workingDirectory: string | undefined,
   args: string[],
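A sketch of how the new helper and constant are consumed. This mirrors the initConfig and getOverlayDatabaseMode changes in the config-utils.ts diff above; the wrapper functions here are illustrative rather than part of the action.

import {
  getGitVersionOrThrow,
  GIT_MINIMUM_VERSION_FOR_OVERLAY,
  GitVersionInfo,
} from "./git-utils";

// Returns the detected Git version, or undefined if it cannot be determined
// (for example because git is not installed).
async function tryGetGitVersion(): Promise<GitVersionInfo | undefined> {
  try {
    return await getGitVersionOrThrow();
  } catch {
    return undefined;
  }
}

async function gitSupportsOverlayAnalysis(): Promise<boolean> {
  const gitVersion = await tryGetGitVersion();
  // An unknown or too-old Git version means overlay analysis stays disabled.
  return (
    gitVersion !== undefined &&
    gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)
  );
}

void gitSupportsOverlayAnalysis().then((supported) =>
  console.log(`git new enough for overlay analysis: ${supported}`),
);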

View File

@@ -33,6 +33,7 @@ import {
   flushDiagnostics,
   logUnwrittenDiagnostics,
   makeDiagnostic,
+  makeTelemetryDiagnostic,
 } from "./diagnostics";
 import { EnvVar } from "./environment";
 import { Feature, Features } from "./feature-flags";
@@ -425,17 +426,10 @@ async function run() {
       // Arbitrarily choose the first language. We could also choose all languages, but that
       // increases the risk of misinterpreting the data.
       config.languages[0],
-      makeDiagnostic(
+      makeTelemetryDiagnostic(
         "codeql-action/bundle-download-telemetry",
         "CodeQL bundle download telemetry",
-        {
-          attributes: toolsDownloadStatusReport,
-          visibility: {
-            cliSummaryTable: false,
-            statusPage: false,
-            telemetry: true,
-          },
-        },
+        toolsDownloadStatusReport,
       ),
     );
   }
@@ -794,17 +788,10 @@ async function recordZstdAvailability(
     // Arbitrarily choose the first language. We could also choose all languages, but that
     // increases the risk of misinterpreting the data.
     config.languages[0],
-    makeDiagnostic(
+    makeTelemetryDiagnostic(
       "codeql-action/zstd-availability",
       "Zstandard availability",
-      {
-        attributes: zstdAvailability,
-        visibility: {
-          cliSummaryTable: false,
-          statusPage: false,
-          telemetry: true,
-        },
-      },
+      zstdAvailability,
     ),
   );
 }

Binary file not shown.

View File

@@ -152,27 +152,38 @@ export interface LoggedMessage {
   message: string | Error;
 }
-export function getRecordingLogger(messages: LoggedMessage[]): Logger {
+export function getRecordingLogger(
+  messages: LoggedMessage[],
+  { logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
+): Logger {
   return {
     debug: (message: string) => {
       messages.push({ type: "debug", message });
-      // eslint-disable-next-line no-console
-      console.debug(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.debug(message);
+      }
     },
     info: (message: string) => {
       messages.push({ type: "info", message });
-      // eslint-disable-next-line no-console
-      console.info(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.info(message);
+      }
     },
     warning: (message: string | Error) => {
       messages.push({ type: "warning", message });
-      // eslint-disable-next-line no-console
-      console.warn(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.warn(message);
+      }
     },
     error: (message: string | Error) => {
       messages.push({ type: "error", message });
-      // eslint-disable-next-line no-console
-      console.error(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.error(message);
+      }
     },
     isDebug: () => true,
     startGroup: () => undefined,
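Usage sketch for the new logToConsole option, based on how artifact-scanner.test.ts uses it to keep noisy debug output out of the test logs; the logged messages below are illustrative.

import { getRecordingLogger, LoggedMessage } from "./testing-utils";

const messages: LoggedMessage[] = [];
// Record messages without echoing them to the console.
const logger = getRecordingLogger(messages, { logToConsole: false });

logger.debug("Extracting gz file: example.trap.gz");
logger.info("Artifact check complete");

// Recorded messages can still be asserted on, even though nothing was printed.
const logOutput = messages.map((msg) => String(msg.message)).join("\n");
console.log(/Extracting gz file/.test(logOutput)); // true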

View File

@@ -4,6 +4,7 @@ import type { VersionInfo } from "./codeql";
 export enum ToolsFeature {
   BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
+  BundleSupportsOverlay = "bundleSupportsOverlay",
   DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
   ForceOverwrite = "forceOverwrite",
   IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",