Compare commits


14 Commits

Author  SHA1  Message  Date
Henry Mercer  ac6c41b910  Extract zstd files too  2025-12-17 15:34:12 +00:00
Henry Mercer  7673a2de65  Run testing Action using Node 24  2025-12-17 11:51:34 +00:00
Henry Mercer  6b5763e5ee  Skip slow test on Windows  2025-12-17 11:47:39 +00:00
Henry Mercer  3322491022  Bump timeout on Windows  2025-12-17 11:41:55 +00:00
Henry Mercer  6bc6217487  Merge branch 'main' into henrymercer/scan-debug-artifacts  2025-12-17 11:36:38 +00:00
Henry Mercer  faf6d35e7b  Verify using post step  2025-12-17 11:35:26 +00:00
Henry Mercer  3b94cfeb15  Avoid logging each extract call  2025-12-17 11:35:26 +00:00
Henry Mercer  da77f9f638  Suppress debug logs for artifact scanner test  2025-12-17 10:25:48 +00:00
Henry Mercer  de172624a1  Slim down test debug artifacts  2025-12-17 10:25:48 +00:00
Henry Mercer  488c1f1959  Add regression test for artifact scanner  2025-12-17 10:25:48 +00:00
Henry Mercer  f2ccf3b4f1  Ensure .gz files are extracted too  2025-12-17 10:25:47 +00:00
Henry Mercer  f28848a66a  Use artifact scanner in debug artifacts PR checks  2025-12-17 10:25:47 +00:00
Henry Mercer  5459b98ca0  Add simple artifact scanner for tests only  2025-12-17 10:25:46 +00:00
Henry Mercer  0c8bfeaf84  Add artifact scanner  2025-12-17 10:25:46 +00:00
14 changed files with 3710 additions and 2451 deletions

View File

@@ -0,0 +1,6 @@
name: Verify that the best-effort debug artifact scan completed
description: Verifies that the best-effort debug artifact scan completed successfully during tests
runs:
using: node24
main: index.js
post: post.js

View File

@@ -0,0 +1,2 @@
// The main step is a no-op, since we can only verify artifact scan completion in the post step.
console.log("Will verify artifact scan completion in the post step.");

View File

@@ -0,0 +1,11 @@
// Post step: runs after the job's other steps have completed, by which point the artifact scan has finished
const process = require("process");
const scanFinished = process.env.CODEQL_ACTION_ARTIFACT_SCAN_FINISHED;
if (scanFinished !== "true") {
console.error("Error: Best-effort artifact scan did not complete. Expected CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true");
process.exit(1);
}
console.log("✓ Best-effort artifact scan completed successfully");

View File

@@ -58,6 +58,8 @@ jobs:
uses: actions/setup-dotnet@v5
with:
dotnet-version: '9.x'
- name: Assert best-effort artifact scan completed
uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -54,6 +54,8 @@ jobs:
uses: actions/setup-dotnet@v5
with:
dotnet-version: '9.x'
- name: Assert best-effort artifact scan completed
uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
- uses: ./../action/init
id: init
with:

File diff suppressed because it is too large.

lib/init-action-post.js (generated, 1893 lines)

File diff suppressed because it is too large.

View File

@@ -19569,11 +19569,11 @@ var require_exec = __commonJS({
});
};
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.exec = exec;
exports2.exec = exec3;
exports2.getExecOutput = getExecOutput;
var string_decoder_1 = require("string_decoder");
var tr = __importStar2(require_toolrunner());
function exec(commandLine, args, options) {
function exec3(commandLine, args, options) {
return __awaiter2(this, void 0, void 0, function* () {
const commandArgs = tr.argStringToArray(commandLine);
if (commandArgs.length === 0) {
@@ -19607,7 +19607,7 @@ var require_exec = __commonJS({
}
};
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
const exitCode = yield exec3(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
stdout += stdoutDecoder.end();
stderr += stderrDecoder.end();
return {
@@ -19695,12 +19695,12 @@ var require_platform = __commonJS({
exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0;
exports2.getDetails = getDetails;
var os_1 = __importDefault2(require("os"));
var exec = __importStar2(require_exec());
var exec3 = __importStar2(require_exec());
var getWindowsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
const { stdout: version } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
silent: true
});
const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
const { stdout: name } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
silent: true
});
return {
@@ -19710,7 +19710,7 @@ var require_platform = __commonJS({
});
var getMacOsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
const { stdout } = yield exec.getExecOutput("sw_vers", void 0, {
const { stdout } = yield exec3.getExecOutput("sw_vers", void 0, {
silent: true
});
const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : "";
@@ -19721,7 +19721,7 @@ var require_platform = __commonJS({
};
});
var getLinuxInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
const { stdout } = yield exec3.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
silent: true
});
const [name, version] = stdout.trim().split("\n");
@@ -19819,7 +19819,7 @@ var require_core = __commonJS({
};
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0;
exports2.exportVariable = exportVariable5;
exports2.exportVariable = exportVariable6;
exports2.setSecret = setSecret;
exports2.addPath = addPath;
exports2.getInput = getInput2;
@@ -19851,7 +19851,7 @@ var require_core = __commonJS({
ExitCode2[ExitCode2["Success"] = 0] = "Success";
ExitCode2[ExitCode2["Failure"] = 1] = "Failure";
})(ExitCode || (exports2.ExitCode = ExitCode = {}));
function exportVariable5(name, val) {
function exportVariable6(name, val) {
const convertedVal = (0, utils_1.toCommandValue)(val);
process.env[name] = convertedVal;
const filePath = process.env["GITHUB_ENV"] || "";
@@ -32654,7 +32654,7 @@ var require_exec2 = __commonJS({
exports2.getExecOutput = exports2.exec = void 0;
var string_decoder_1 = require("string_decoder");
var tr = __importStar2(require_toolrunner2());
function exec(commandLine, args, options) {
function exec3(commandLine, args, options) {
return __awaiter2(this, void 0, void 0, function* () {
const commandArgs = tr.argStringToArray(commandLine);
if (commandArgs.length === 0) {
@@ -32666,7 +32666,7 @@ var require_exec2 = __commonJS({
return runner.exec();
});
}
exports2.exec = exec;
exports2.exec = exec3;
function getExecOutput(commandLine, args, options) {
var _a, _b;
return __awaiter2(this, void 0, void 0, function* () {
@@ -32689,7 +32689,7 @@ var require_exec2 = __commonJS({
}
};
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
const exitCode = yield exec3(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
stdout += stdoutDecoder.end();
stderr += stderrDecoder.end();
return {
@@ -32767,12 +32767,12 @@ var require_platform2 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.getDetails = exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0;
var os_1 = __importDefault2(require("os"));
var exec = __importStar2(require_exec2());
var exec3 = __importStar2(require_exec2());
var getWindowsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
const { stdout: version } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
silent: true
});
const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
const { stdout: name } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
silent: true
});
return {
@@ -32782,7 +32782,7 @@ var require_platform2 = __commonJS({
});
var getMacOsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
const { stdout } = yield exec.getExecOutput("sw_vers", void 0, {
const { stdout } = yield exec3.getExecOutput("sw_vers", void 0, {
silent: true
});
const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : "";
@@ -32793,7 +32793,7 @@ var require_platform2 = __commonJS({
};
});
var getLinuxInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
const { stdout } = yield exec3.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
silent: true
});
const [name, version] = stdout.trim().split("\n");
@@ -32893,7 +32893,7 @@ var require_core2 = __commonJS({
ExitCode2[ExitCode2["Success"] = 0] = "Success";
ExitCode2[ExitCode2["Failure"] = 1] = "Failure";
})(ExitCode || (exports2.ExitCode = ExitCode = {}));
function exportVariable5(name, val) {
function exportVariable6(name, val) {
const convertedVal = (0, utils_1.toCommandValue)(val);
process.env[name] = convertedVal;
const filePath = process.env["GITHUB_ENV"] || "";
@@ -32902,7 +32902,7 @@ var require_core2 = __commonJS({
}
(0, command_1.issueCommand)("set-env", { name }, convertedVal);
}
exports2.exportVariable = exportVariable5;
exports2.exportVariable = exportVariable6;
function setSecret(secret) {
(0, command_1.issueCommand)("add-mask", {}, secret);
}
@@ -36226,7 +36226,7 @@ var require_cacheUtils = __commonJS({
exports2.getCacheVersion = getCacheVersion;
exports2.getRuntimeToken = getRuntimeToken;
var core14 = __importStar2(require_core());
var exec = __importStar2(require_exec());
var exec3 = __importStar2(require_exec());
var glob2 = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
@@ -36306,7 +36306,7 @@ var require_cacheUtils = __commonJS({
additionalArgs.push("--version");
core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`);
try {
yield exec.exec(`${app}`, additionalArgs, {
yield exec3.exec(`${app}`, additionalArgs, {
ignoreReturnCode: true,
silent: true,
listeners: {
@@ -116499,7 +116499,7 @@ var require_exec3 = __commonJS({
exports2.getExecOutput = exports2.exec = void 0;
var string_decoder_1 = require("string_decoder");
var tr = __importStar2(require_toolrunner3());
function exec(commandLine, args, options) {
function exec3(commandLine, args, options) {
return __awaiter2(this, void 0, void 0, function* () {
const commandArgs = tr.argStringToArray(commandLine);
if (commandArgs.length === 0) {
@@ -116511,7 +116511,7 @@ var require_exec3 = __commonJS({
return runner.exec();
});
}
exports2.exec = exec;
exports2.exec = exec3;
function getExecOutput(commandLine, args, options) {
var _a, _b;
return __awaiter2(this, void 0, void 0, function* () {
@@ -116534,7 +116534,7 @@ var require_exec3 = __commonJS({
}
};
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
const exitCode = yield exec3(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
stdout += stdoutDecoder.end();
stderr += stderrDecoder.end();
return {
@@ -116612,12 +116612,12 @@ var require_platform3 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.getDetails = exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0;
var os_1 = __importDefault2(require("os"));
var exec = __importStar2(require_exec3());
var exec3 = __importStar2(require_exec3());
var getWindowsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
const { stdout: version } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
silent: true
});
const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
const { stdout: name } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
silent: true
});
return {
@@ -116627,7 +116627,7 @@ var require_platform3 = __commonJS({
});
var getMacOsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
const { stdout } = yield exec.getExecOutput("sw_vers", void 0, {
const { stdout } = yield exec3.getExecOutput("sw_vers", void 0, {
silent: true
});
const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : "";
@@ -116638,7 +116638,7 @@ var require_platform3 = __commonJS({
};
});
var getLinuxInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
const { stdout } = yield exec3.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
silent: true
});
const [name, version] = stdout.trim().split("\n");
@@ -116738,7 +116738,7 @@ var require_core4 = __commonJS({
ExitCode2[ExitCode2["Success"] = 0] = "Success";
ExitCode2[ExitCode2["Failure"] = 1] = "Failure";
})(ExitCode || (exports2.ExitCode = ExitCode = {}));
function exportVariable5(name, val) {
function exportVariable6(name, val) {
const convertedVal = (0, utils_1.toCommandValue)(val);
process.env[name] = convertedVal;
const filePath = process.env["GITHUB_ENV"] || "";
@@ -116747,7 +116747,7 @@ var require_core4 = __commonJS({
}
(0, command_1.issueCommand)("set-env", { name }, convertedVal);
}
exports2.exportVariable = exportVariable5;
exports2.exportVariable = exportVariable6;
function setSecret(secret) {
(0, command_1.issueCommand)("add-mask", {}, secret);
}
@@ -121763,7 +121763,7 @@ var require_exec4 = __commonJS({
exports2.getExecOutput = exports2.exec = void 0;
var string_decoder_1 = require("string_decoder");
var tr = __importStar2(require_toolrunner4());
function exec(commandLine, args, options) {
function exec3(commandLine, args, options) {
return __awaiter2(this, void 0, void 0, function* () {
const commandArgs = tr.argStringToArray(commandLine);
if (commandArgs.length === 0) {
@@ -121775,7 +121775,7 @@ var require_exec4 = __commonJS({
return runner.exec();
});
}
exports2.exec = exec;
exports2.exec = exec3;
function getExecOutput(commandLine, args, options) {
var _a, _b;
return __awaiter2(this, void 0, void 0, function* () {
@@ -121798,7 +121798,7 @@ var require_exec4 = __commonJS({
}
};
const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener });
const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
const exitCode = yield exec3(commandLine, args, Object.assign(Object.assign({}, options), { listeners }));
stdout += stdoutDecoder.end();
stderr += stderrDecoder.end();
return {
@@ -121876,12 +121876,12 @@ var require_platform4 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.getDetails = exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0;
var os_1 = __importDefault2(require("os"));
var exec = __importStar2(require_exec4());
var exec3 = __importStar2(require_exec4());
var getWindowsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
const { stdout: version } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, {
silent: true
});
const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
const { stdout: name } = yield exec3.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, {
silent: true
});
return {
@@ -121891,7 +121891,7 @@ var require_platform4 = __commonJS({
});
var getMacOsInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d;
const { stdout } = yield exec.getExecOutput("sw_vers", void 0, {
const { stdout } = yield exec3.getExecOutput("sw_vers", void 0, {
silent: true
});
const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : "";
@@ -121902,7 +121902,7 @@ var require_platform4 = __commonJS({
};
});
var getLinuxInfo = () => __awaiter2(void 0, void 0, void 0, function* () {
const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
const { stdout } = yield exec3.getExecOutput("lsb_release", ["-i", "-r", "-s"], {
silent: true
});
const [name, version] = stdout.trim().split("\n");
@@ -122002,7 +122002,7 @@ var require_core5 = __commonJS({
ExitCode2[ExitCode2["Success"] = 0] = "Success";
ExitCode2[ExitCode2["Failure"] = 1] = "Failure";
})(ExitCode || (exports2.ExitCode = ExitCode = {}));
function exportVariable5(name, val) {
function exportVariable6(name, val) {
const convertedVal = (0, utils_1.toCommandValue)(val);
process.env[name] = convertedVal;
const filePath = process.env["GITHUB_ENV"] || "";
@@ -122011,7 +122011,7 @@ var require_core5 = __commonJS({
}
(0, command_1.issueCommand)("set-env", { name }, convertedVal);
}
exports2.exportVariable = exportVariable5;
exports2.exportVariable = exportVariable6;
function setSecret(secret) {
(0, command_1.issueCommand)("add-mask", {}, secret);
}
@@ -126807,6 +126807,9 @@ var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var actionsCache3 = __toESM(require_cache4());
var glob = __toESM(require_glob());
// src/artifact-scanner.ts
var exec = __toESM(require_exec());
// src/debug-artifacts.ts
async function getArtifactUploaderClient(logger, ghVariant) {
if (ghVariant === "GitHub Enterprise Server" /* GHES */) {

File diff suppressed because it is too large.

View File

@@ -0,0 +1,98 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import test from "ava";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";
import { getRecordingLogger, LoggedMessage } from "./testing-utils";
test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file with a fake GitHub token
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file with token ghp_1234567890123456789012345678901234AB",
);
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
t.regex(
error?.message || "",
/Found 1 potential GitHub token.*Personal Access Token/,
);
t.regex(error?.message || "", /test\.txt/);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
test("scanArtifactsForTokens handles files without tokens", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file without tokens
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file without any sensitive data",
);
await t.notThrowsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
if (os.platform() !== "win32") {
test("scanArtifactsForTokens finds token in debug artifacts", async (t) => {
t.timeout(15000); // 15 seconds
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages, { logToConsole: false });
// The zip here is a regression test based on
// https://github.com/github/codeql-action/security/advisories/GHSA-vqf5-2xx6-9wfm
const testZip = path.join(
__dirname,
"..",
"src",
"testdata",
"debug-artifacts-with-fake-token.zip",
);
// This zip file contains a nested structure with a fake token in:
// my-db-java-partial.zip/trap/java/invocations/kotlin.9017231652989744319.trap
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testZip], logger),
);
t.regex(
error?.message || "",
/Found.*potential GitHub token/,
"Should detect token in nested zip",
);
t.regex(
error?.message || "",
/kotlin\.9017231652989744319\.trap/,
"Should report the .trap file containing the token",
);
const logOutput = messages.map((msg) => msg.message).join("\n");
t.regex(
logOutput,
/^Extracting gz file: .*\.gz$/m,
"Logs should show that .gz files were extracted",
);
});
}

src/artifact-scanner.ts (normal file, 379 lines)
View File

@@ -0,0 +1,379 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as exec from "@actions/exec";
import { Logger } from "./logging";
import { getErrorMessage } from "./util";
/**
* GitHub token patterns to scan for.
* These patterns match various GitHub token formats.
*/
const GITHUB_TOKEN_PATTERNS = [
{
name: "Personal Access Token",
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
},
{
name: "OAuth Access Token",
pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
},
{
name: "User-to-Server Token",
pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Server-to-Server Token",
pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Refresh Token",
pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
},
{
name: "App Installation Access Token",
pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
},
];
interface TokenFinding {
tokenType: string;
filePath: string;
}
interface ScanResult {
scannedFiles: number;
findings: TokenFinding[];
}
/**
* Scans a file for GitHub tokens.
*
* @param filePath Path to the file to scan
* @param relativePath Relative path for display purposes
* @param logger Logger instance
* @returns Array of token findings in the file
*/
function scanFileForTokens(
filePath: string,
relativePath: string,
logger: Logger,
): TokenFinding[] {
const findings: TokenFinding[] = [];
try {
const content = fs.readFileSync(filePath, "utf8");
for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
const matches = content.match(pattern);
if (matches) {
for (let i = 0; i < matches.length; i++) {
findings.push({ tokenType: name, filePath: relativePath });
}
logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
}
}
return findings;
} catch (e) {
// If we can't read the file as text, it's likely binary or inaccessible
logger.debug(
`Could not scan file ${filePath} for tokens: ${getErrorMessage(e)}`,
);
return [];
}
}
/**
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
*
* @param archivePath Path to the archive file
* @param relativeArchivePath Relative path of the archive for display
* @param extractDir Directory to extract to
* @param logger Logger instance
* @param depth Current recursion depth (to prevent infinite loops)
* @returns Scan results
*/
async function scanArchiveFile(
archivePath: string,
relativeArchivePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const MAX_DEPTH = 10; // Prevent infinite recursion
if (depth > MAX_DEPTH) {
throw new Error(
`Maximum archive extraction depth (${MAX_DEPTH}) reached for ${archivePath}`,
);
}
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
try {
const tempExtractDir = fs.mkdtempSync(
path.join(extractDir, `extract-${depth}-`),
);
// Determine archive type and extract accordingly
const fileName = path.basename(archivePath).toLowerCase();
if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
// Extract tar.gz files
logger.debug(`Extracting tar.gz file: ${archivePath}`);
await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], {
silent: true,
});
} else if (fileName.endsWith(".tar.zst")) {
// Extract tar.zst files
logger.debug(`Extracting tar.zst file: ${archivePath}`);
await exec.exec(
"tar",
["--zstd", "-xf", archivePath, "-C", tempExtractDir],
{
silent: true,
},
);
} else if (fileName.endsWith(".zst")) {
// Extract .zst files (single file compression)
logger.debug(`Extracting zst file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".zst"),
);
await exec.exec("zstd", ["-d", archivePath, "-o", outputFile], {
silent: true,
});
} else if (fileName.endsWith(".gz")) {
// Extract .gz files (single file compression)
logger.debug(`Extracting gz file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".gz"),
);
await exec.exec("gunzip", ["-c", archivePath], {
outStream: fs.createWriteStream(outputFile),
silent: true,
});
} else if (fileName.endsWith(".zip")) {
// Extract zip files
logger.debug(`Extracting zip file: ${archivePath}`);
await exec.exec(
"unzip",
["-q", "-o", archivePath, "-d", tempExtractDir],
{
silent: true,
},
);
}
// Scan the extracted contents
const scanResult = await scanDirectory(
tempExtractDir,
relativeArchivePath,
logger,
depth + 1,
);
result.scannedFiles += scanResult.scannedFiles;
result.findings.push(...scanResult.findings);
// Clean up extracted files
fs.rmSync(tempExtractDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}`,
);
}
return result;
}
/**
* Scans a single file, including recursive archive extraction if applicable.
*
* @param fullPath Full path to the file
* @param relativePath Relative path for display
* @param extractDir Directory to use for extraction (for archive files)
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanFile(
fullPath: string,
relativePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 1,
findings: [],
};
// Check if it's an archive file and recursively scan it
const fileName = path.basename(fullPath).toLowerCase();
const isArchive =
fileName.endsWith(".zip") ||
fileName.endsWith(".tar.gz") ||
fileName.endsWith(".tgz") ||
fileName.endsWith(".tar.zst") ||
fileName.endsWith(".zst") ||
fileName.endsWith(".gz");
if (isArchive) {
const archiveResult = await scanArchiveFile(
fullPath,
relativePath,
extractDir,
logger,
depth,
);
result.scannedFiles += archiveResult.scannedFiles;
result.findings.push(...archiveResult.findings);
}
// Also scan the file itself for tokens; files that cannot be read as text are skipped inside scanFileForTokens
const fileFindings = scanFileForTokens(fullPath, relativePath, logger);
result.findings.push(...fileFindings);
return result;
}
/**
* Recursively scans a directory for GitHub tokens.
*
* @param dirPath Directory path to scan
* @param baseRelativePath Base relative path for computing display paths
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanDirectory(
dirPath: string,
baseRelativePath: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.join(baseRelativePath, entry.name);
if (entry.isDirectory()) {
const subResult = await scanDirectory(
fullPath,
relativePath,
logger,
depth,
);
result.scannedFiles += subResult.scannedFiles;
result.findings.push(...subResult.findings);
} else if (entry.isFile()) {
const fileResult = await scanFile(
fullPath,
relativePath,
path.dirname(fullPath),
logger,
depth,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
return result;
}
/**
* Scans a list of files and directories for GitHub tokens.
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
*
* @param filesToScan List of file paths to scan
* @param logger Logger instance
* @returns Nothing; rejects with an error if any potential tokens are found
*/
export async function scanArtifactsForTokens(
filesToScan: string[],
logger: Logger,
): Promise<void> {
logger.info(
"Starting best-effort check for potential GitHub tokens in debug artifacts (for testing purposes only)...",
);
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
// Create a temporary directory for extraction
const tempScanDir = fs.mkdtempSync(path.join(os.tmpdir(), "artifact-scan-"));
try {
for (const filePath of filesToScan) {
const stats = fs.statSync(filePath);
const fileName = path.basename(filePath);
if (stats.isDirectory()) {
const dirResult = await scanDirectory(filePath, fileName, logger);
result.scannedFiles += dirResult.scannedFiles;
result.findings.push(...dirResult.findings);
} else if (stats.isFile()) {
const fileResult = await scanFile(
filePath,
fileName,
tempScanDir,
logger,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
// Compute statistics from findings
const tokenTypesCounts = new Map<string, number>();
const filesWithTokens = new Set<string>();
for (const finding of result.findings) {
tokenTypesCounts.set(
finding.tokenType,
(tokenTypesCounts.get(finding.tokenType) || 0) + 1,
);
filesWithTokens.add(finding.filePath);
}
const tokenTypesSummary = Array.from(tokenTypesCounts.entries())
.map(([type, count]) => `${count} ${type}${count > 1 ? "s" : ""}`)
.join(", ");
const baseSummary = `scanned ${result.scannedFiles} files, found ${result.findings.length} potential token(s) in ${filesWithTokens.size} file(s)`;
const summaryWithTypes = tokenTypesSummary
? `${baseSummary} (${tokenTypesSummary})`
: baseSummary;
logger.info(`Artifact check complete: ${summaryWithTypes}`);
if (result.findings.length > 0) {
const fileList = Array.from(filesWithTokens).join(", ");
throw new Error(
`Found ${result.findings.length} potential GitHub token(s) (${tokenTypesSummary}) in debug artifacts at: ${fileList}. This is a best-effort check for testing purposes only.`,
);
}
} finally {
// Clean up temporary directory
try {
fs.rmSync(tempScanDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not clean up temporary scan directory: ${getErrorMessage(e)}`,
);
}
}
}
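
For reference, a minimal standalone sketch (not part of this diff) of how the "Personal Access Token" pattern in GITHUB_TOKEN_PATTERNS behaves on a sample string; the token below is the same fake 36-character ghp_ value used in the tests above:

// Standalone TypeScript sketch: exercises the PAT regex defined in GITHUB_TOKEN_PATTERNS.
// The token is fake (36 base62 characters after the ghp_ prefix) and taken from the test fixture.
const patPattern = /\bghp_[a-zA-Z0-9]{36}\b/g;
const sample = "config dump: token=ghp_1234567890123456789012345678901234AB end";
console.log(sample.match(patPattern)); // [ 'ghp_1234567890123456789012345678901234AB' ]
console.log("token=ghp_tooShort".match(patPattern)); // null: fewer than 36 characters after the prefix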

View File

@@ -8,6 +8,7 @@ import archiver from "archiver";
import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
import { dbIsFinalized } from "./analyze";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { type CodeQL } from "./codeql";
import { Config } from "./config-utils";
import { EnvVar } from "./environment";
@@ -23,6 +24,7 @@ import {
getCodeQLDatabasePath,
getErrorMessage,
GitHubVariant,
isInTestMode,
listFolder,
} from "./util";
@@ -269,6 +271,14 @@ export async function uploadDebugArtifacts(
return "upload-not-supported";
}
// When running in test mode, perform a best effort scan of the debug artifacts. The artifact
// scanner is basic and not reliable or fast enough for production use, but it can help catch
// some issues early.
if (isInTestMode()) {
await scanArtifactsForTokens(toUpload, logger);
core.exportVariable("CODEQL_ACTION_ARTIFACT_SCAN_FINISHED", "true");
}
let suffix = "";
const matrix = getOptionalInput("matrix");
if (matrix) {

Binary file not shown.

View File

@@ -152,27 +152,38 @@ export interface LoggedMessage {
message: string | Error;
}
export function getRecordingLogger(messages: LoggedMessage[]): Logger {
export function getRecordingLogger(
messages: LoggedMessage[],
{ logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
): Logger {
return {
debug: (message: string) => {
messages.push({ type: "debug", message });
// eslint-disable-next-line no-console
console.debug(message);
if (logToConsole) {
// eslint-disable-next-line no-console
console.debug(message);
}
},
info: (message: string) => {
messages.push({ type: "info", message });
// eslint-disable-next-line no-console
console.info(message);
if (logToConsole) {
// eslint-disable-next-line no-console
console.info(message);
}
},
warning: (message: string | Error) => {
messages.push({ type: "warning", message });
// eslint-disable-next-line no-console
console.warn(message);
if (logToConsole) {
// eslint-disable-next-line no-console
console.warn(message);
}
},
error: (message: string | Error) => {
messages.push({ type: "error", message });
// eslint-disable-next-line no-console
console.error(message);
if (logToConsole) {
// eslint-disable-next-line no-console
console.error(message);
}
},
isDebug: () => true,
startGroup: () => undefined,