Mirror of https://github.com/github/codeql-action.git, synced 2025-12-17 21:09:40 +08:00
Compare commits: 21 commits (default-se… to henrymerce…)

| SHA1 |
|---|
| 7673a2de65 |
| 6b5763e5ee |
| 3322491022 |
| 6bc6217487 |
| faf6d35e7b |
| 3b94cfeb15 |
| b88acb2f6c |
| 241948c698 |
| da77f9f638 |
| de172624a1 |
| 488c1f1959 |
| f2ccf3b4f1 |
| f28848a66a |
| 5459b98ca0 |
| 0c8bfeaf84 |
| 1fe89fe9cb |
| 7a5748cf0d |
| db75d46248 |
| a0fc644617 |
| e1058e4d74 |
| d4f39b0766 |
.github/actions/verify-debug-artifact-scan-completed/action.yml (vendored, new file, +6)
@@ -0,0 +1,6 @@
name: Verify that the best-effort debug artifact scan completed
description: Verifies that the best-effort debug artifact scan completed successfully during tests
runs:
  using: node24
  main: index.js
  post: post.js
.github/actions/verify-debug-artifact-scan-completed/index.js (vendored, new file, +2)
@@ -0,0 +1,2 @@
// The main step is a no-op, since we can only verify artifact scan completion in the post step.
console.log("Will verify artifact scan completion in the post step.");
.github/actions/verify-debug-artifact-scan-completed/post.js (vendored, new file, +11)
@@ -0,0 +1,11 @@
// Post step - runs after the workflow completes, when artifact scan has finished
const process = require("process");

const scanFinished = process.env.CODEQL_ACTION_ARTIFACT_SCAN_FINISHED;

if (scanFinished !== "true") {
  console.error("Error: Best-effort artifact scan did not complete. Expected CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true");
  process.exit(1);
}

console.log("✓ Best-effort artifact scan completed successfully");
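For context on the handshake this post step verifies: `core.exportVariable` (called in `uploadDebugArtifacts`, see the src/debug-artifacts.ts hunk below) appends to `$GITHUB_ENV`, which makes the variable visible to every later step in the job, including this action's post step. A minimal sketch of the same producer/verifier pattern, with illustrative names that are not part of this diff:

import * as core from "@actions/core";

// Producer (an earlier step): exportVariable appends to $GITHUB_ENV, so the
// value is visible to all subsequent steps, including post steps.
core.exportVariable("MY_SCAN_FINISHED", "true"); // hypothetical variable name

// Verifier (a later step or a post step): read the value back and fail the
// job with a non-zero exit code if the producer never completed.
if (process.env.MY_SCAN_FINISHED !== "true") {
  process.exit(1);
}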
.github/workflows/debug-artifacts.yml (2 changed lines)
@@ -58,6 +58,8 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
           dotnet-version: '9.x'
+      - name: Assert best-effort artifact scan completed
+        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
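Note the ordering: the assert step is added before `./../action/init`. GitHub Actions runs post steps in the reverse order of their main steps, so placing the verifier first means its post step runs after init's post step, which is the step that uploads the debug artifacts and, in test mode, exports CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true.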
.github/workflows/debug-artifacts-safe.yml (vendored, 2 changed lines)
@@ -54,6 +54,8 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
           dotnet-version: '9.x'
+      - name: Assert best-effort artifact scan completed
+        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         id: init
         with:
Generated files (diffs suppressed because they are too large):

| Changed lines | File |
|---|---|
| 43185 | lib/analyze-action-post.js |
| 37179 | lib/analyze-action.js |
| 36052 | lib/autobuild-action.js |
| 43270 | lib/init-action-post.js |
| 37151 | lib/init-action.js |
| 36052 | lib/resolve-environment-action.js |
| 36058 | lib/setup-codeql-action.js |
| 42554 | lib/start-proxy-action-post.js |
| 36419 | lib/start-proxy-action.js |
| 36070 | lib/upload-lib.js |
| 43089 | lib/upload-sarif-action-post.js |
| 36076 | lib/upload-sarif-action.js |
| 980 | package-lock.json |
package.json (14 changed lines)
@@ -24,12 +24,12 @@
   },
   "license": "MIT",
   "dependencies": {
-    "@actions/artifact": "^4.0.0",
+    "@actions/artifact": "^5.0.1",
     "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
-    "@actions/cache": "^4.1.0",
-    "@actions/core": "^1.11.1",
-    "@actions/exec": "^1.1.1",
-    "@actions/github": "^6.0.0",
+    "@actions/cache": "^5.0.1",
+    "@actions/core": "^2.0.1",
+    "@actions/exec": "^2.0.0",
+    "@actions/github": "^6.0.1",
     "@actions/glob": "^0.5.0",
     "@actions/http-client": "^3.0.0",
     "@actions/io": "^2.0.0",
@@ -51,7 +51,7 @@
     "@ava/typescript": "6.0.0",
     "@eslint/compat": "^2.0.0",
     "@eslint/eslintrc": "^3.3.3",
-    "@eslint/js": "^9.39.1",
+    "@eslint/js": "^9.39.2",
     "@microsoft/eslint-formatter-sarif": "^3.1.0",
     "@octokit/types": "^16.0.0",
     "@types/archiver": "^7.0.0",
@@ -61,7 +61,7 @@
     "@types/node-forge": "^1.3.14",
     "@types/semver": "^7.7.1",
     "@types/sinon": "^21.0.0",
-    "@typescript-eslint/eslint-plugin": "^8.48.1",
+    "@typescript-eslint/eslint-plugin": "^8.49.0",
     "@typescript-eslint/parser": "^8.48.0",
     "ava": "^6.4.1",
     "esbuild": "^0.27.1",
src/artifact-scanner.test.ts (new file, +98)
@@ -0,0 +1,98 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";

import test from "ava";

import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";
import { getRecordingLogger, LoggedMessage } from "./testing-utils";

test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
  const logger = getRunnerLogger(true);
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));

  try {
    // Create a test file with a fake GitHub token
    const testFile = path.join(tempDir, "test.txt");
    fs.writeFileSync(
      testFile,
      "This is a test file with token ghp_1234567890123456789012345678901234AB",
    );

    const error = await t.throwsAsync(
      async () => await scanArtifactsForTokens([testFile], logger),
    );

    t.regex(
      error?.message || "",
      /Found 1 potential GitHub token.*Personal Access Token/,
    );
    t.regex(error?.message || "", /test\.txt/);
  } finally {
    // Clean up
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
});

test("scanArtifactsForTokens handles files without tokens", async (t) => {
  const logger = getRunnerLogger(true);
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));

  try {
    // Create a test file without tokens
    const testFile = path.join(tempDir, "test.txt");
    fs.writeFileSync(
      testFile,
      "This is a test file without any sensitive data",
    );

    await t.notThrowsAsync(
      async () => await scanArtifactsForTokens([testFile], logger),
    );
  } finally {
    // Clean up
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
});

if (os.platform() !== "win32") {
  test("scanArtifactsForTokens finds token in debug artifacts", async (t) => {
    t.timeout(15000); // 15 seconds
    const messages: LoggedMessage[] = [];
    const logger = getRecordingLogger(messages, { logToConsole: false });
    // The zip here is a regression test based on
    // https://github.com/github/codeql-action/security/advisories/GHSA-vqf5-2xx6-9wfm
    const testZip = path.join(
      __dirname,
      "..",
      "src",
      "testdata",
      "debug-artifacts-with-fake-token.zip",
    );

    // This zip file contains a nested structure with a fake token in:
    // my-db-java-partial.zip/trap/java/invocations/kotlin.9017231652989744319.trap
    const error = await t.throwsAsync(
      async () => await scanArtifactsForTokens([testZip], logger),
    );

    t.regex(
      error?.message || "",
      /Found.*potential GitHub token/,
      "Should detect token in nested zip",
    );
    t.regex(
      error?.message || "",
      /kotlin\.9017231652989744319\.trap/,
      "Should report the .trap file containing the token",
    );

    const logOutput = messages.map((msg) => msg.message).join("\n");
    t.regex(
      logOutput,
      /^Extracting gz file: .*\.gz$/m,
      "Logs should show that .gz files were extracted",
    );
  });
}
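As a quick sanity check on the fixture token used above: it is ghp_ followed by exactly 36 alphanumeric characters (34 digits plus "AB"), which is what the scanner's Personal Access Token pattern in artifact-scanner.ts below expects. A standalone snippet to confirm:

// The PAT pattern, copied from GITHUB_TOKEN_PATTERNS in artifact-scanner.ts.
const pat = /\bghp_[a-zA-Z0-9]{36}\b/g;

const sample =
  "This is a test file with token ghp_1234567890123456789012345678901234AB";
console.log(sample.match(pat));
// => ["ghp_1234567890123456789012345678901234AB"]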
src/artifact-scanner.ts (new file, +357)
@@ -0,0 +1,357 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";

import * as exec from "@actions/exec";

import { Logger } from "./logging";
import { getErrorMessage } from "./util";

/**
 * GitHub token patterns to scan for.
 * These patterns match various GitHub token formats.
 */
const GITHUB_TOKEN_PATTERNS = [
  {
    name: "Personal Access Token",
    pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
  },
  {
    name: "OAuth Access Token",
    pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
  },
  {
    name: "User-to-Server Token",
    pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
  },
  {
    name: "Server-to-Server Token",
    pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
  },
  {
    name: "Refresh Token",
    pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
  },
  {
    name: "App Installation Access Token",
    pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
  },
];

interface TokenFinding {
  tokenType: string;
  filePath: string;
}

interface ScanResult {
  scannedFiles: number;
  findings: TokenFinding[];
}

/**
 * Scans a file for GitHub tokens.
 *
 * @param filePath Path to the file to scan
 * @param relativePath Relative path for display purposes
 * @param logger Logger instance
 * @returns Array of token findings in the file
 */
function scanFileForTokens(
  filePath: string,
  relativePath: string,
  logger: Logger,
): TokenFinding[] {
  const findings: TokenFinding[] = [];
  try {
    const content = fs.readFileSync(filePath, "utf8");

    for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
      const matches = content.match(pattern);
      if (matches) {
        for (let i = 0; i < matches.length; i++) {
          findings.push({ tokenType: name, filePath: relativePath });
        }
        logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
      }
    }

    return findings;
  } catch (e) {
    // If we can't read the file as text, it's likely binary or inaccessible
    logger.debug(
      `Could not scan file ${filePath} for tokens: ${getErrorMessage(e)}`,
    );
    return [];
  }
}

/**
 * Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
 *
 * @param archivePath Path to the archive file
 * @param relativeArchivePath Relative path of the archive for display
 * @param extractDir Directory to extract to
 * @param logger Logger instance
 * @param depth Current recursion depth (to prevent infinite loops)
 * @returns Scan results
 */
async function scanArchiveFile(
  archivePath: string,
  relativeArchivePath: string,
  extractDir: string,
  logger: Logger,
  depth: number = 0,
): Promise<ScanResult> {
  const MAX_DEPTH = 10; // Prevent infinite recursion
  if (depth > MAX_DEPTH) {
    throw new Error(
      `Maximum archive extraction depth (${MAX_DEPTH}) reached for ${archivePath}`,
    );
  }

  const result: ScanResult = {
    scannedFiles: 0,
    findings: [],
  };

  try {
    const tempExtractDir = fs.mkdtempSync(
      path.join(extractDir, `extract-${depth}-`),
    );

    // Determine archive type and extract accordingly
    const fileName = path.basename(archivePath).toLowerCase();
    if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
      // Extract tar.gz files
      logger.debug(`Extracting tar.gz file: ${archivePath}`);
      await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], {
        silent: true,
      });
    } else if (fileName.endsWith(".gz")) {
      // Extract .gz files (single file compression)
      logger.debug(`Extracting gz file: ${archivePath}`);
      const outputFile = path.join(
        tempExtractDir,
        path.basename(archivePath, ".gz"),
      );
      await exec.exec("gunzip", ["-c", archivePath], {
        outStream: fs.createWriteStream(outputFile),
        silent: true,
      });
    } else if (fileName.endsWith(".zip")) {
      // Extract zip files
      logger.debug(`Extracting zip file: ${archivePath}`);
      await exec.exec(
        "unzip",
        ["-q", "-o", archivePath, "-d", tempExtractDir],
        {
          silent: true,
        },
      );
    }

    // Scan the extracted contents
    const scanResult = await scanDirectory(
      tempExtractDir,
      relativeArchivePath,
      logger,
      depth + 1,
    );
    result.scannedFiles += scanResult.scannedFiles;
    result.findings.push(...scanResult.findings);

    // Clean up extracted files
    fs.rmSync(tempExtractDir, { recursive: true, force: true });
  } catch (e) {
    logger.debug(
      `Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}`,
    );
  }

  return result;
}

/**
 * Scans a single file, including recursive archive extraction if applicable.
 *
 * @param fullPath Full path to the file
 * @param relativePath Relative path for display
 * @param extractDir Directory to use for extraction (for archive files)
 * @param logger Logger instance
 * @param depth Current recursion depth
 * @returns Scan results
 */
async function scanFile(
  fullPath: string,
  relativePath: string,
  extractDir: string,
  logger: Logger,
  depth: number = 0,
): Promise<ScanResult> {
  const result: ScanResult = {
    scannedFiles: 1,
    findings: [],
  };

  // Check if it's an archive file and recursively scan it
  const fileName = path.basename(fullPath).toLowerCase();
  const isArchive =
    fileName.endsWith(".zip") ||
    fileName.endsWith(".tar.gz") ||
    fileName.endsWith(".tgz") ||
    fileName.endsWith(".gz");

  if (isArchive) {
    const archiveResult = await scanArchiveFile(
      fullPath,
      relativePath,
      extractDir,
      logger,
      depth,
    );
    result.scannedFiles += archiveResult.scannedFiles;
    result.findings.push(...archiveResult.findings);
  }

  // Scan the file itself for tokens (unless it's a pure binary archive format)
  const fileFindings = scanFileForTokens(fullPath, relativePath, logger);
  result.findings.push(...fileFindings);

  return result;
}

/**
 * Recursively scans a directory for GitHub tokens.
 *
 * @param dirPath Directory path to scan
 * @param baseRelativePath Base relative path for computing display paths
 * @param logger Logger instance
 * @param depth Current recursion depth
 * @returns Scan results
 */
async function scanDirectory(
  dirPath: string,
  baseRelativePath: string,
  logger: Logger,
  depth: number = 0,
): Promise<ScanResult> {
  const result: ScanResult = {
    scannedFiles: 0,
    findings: [],
  };

  const entries = fs.readdirSync(dirPath, { withFileTypes: true });

  for (const entry of entries) {
    const fullPath = path.join(dirPath, entry.name);
    const relativePath = path.join(baseRelativePath, entry.name);

    if (entry.isDirectory()) {
      const subResult = await scanDirectory(
        fullPath,
        relativePath,
        logger,
        depth,
      );
      result.scannedFiles += subResult.scannedFiles;
      result.findings.push(...subResult.findings);
    } else if (entry.isFile()) {
      const fileResult = await scanFile(
        fullPath,
        relativePath,
        path.dirname(fullPath),
        logger,
        depth,
      );
      result.scannedFiles += fileResult.scannedFiles;
      result.findings.push(...fileResult.findings);
    }
  }

  return result;
}

/**
 * Scans a list of files and directories for GitHub tokens.
 * Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
 *
 * @param filesToScan List of file paths to scan
 * @param logger Logger instance
 * @returns Scan results
 */
export async function scanArtifactsForTokens(
  filesToScan: string[],
  logger: Logger,
): Promise<void> {
  logger.info(
    "Starting best-effort check for potential GitHub tokens in debug artifacts (for testing purposes only)...",
  );

  const result: ScanResult = {
    scannedFiles: 0,
    findings: [],
  };

  // Create a temporary directory for extraction
  const tempScanDir = fs.mkdtempSync(path.join(os.tmpdir(), "artifact-scan-"));

  try {
    for (const filePath of filesToScan) {
      const stats = fs.statSync(filePath);
      const fileName = path.basename(filePath);

      if (stats.isDirectory()) {
        const dirResult = await scanDirectory(filePath, fileName, logger);
        result.scannedFiles += dirResult.scannedFiles;
        result.findings.push(...dirResult.findings);
      } else if (stats.isFile()) {
        const fileResult = await scanFile(
          filePath,
          fileName,
          tempScanDir,
          logger,
        );
        result.scannedFiles += fileResult.scannedFiles;
        result.findings.push(...fileResult.findings);
      }
    }

    // Compute statistics from findings
    const tokenTypesCounts = new Map<string, number>();
    const filesWithTokens = new Set<string>();
    for (const finding of result.findings) {
      tokenTypesCounts.set(
        finding.tokenType,
        (tokenTypesCounts.get(finding.tokenType) || 0) + 1,
      );
      filesWithTokens.add(finding.filePath);
    }

    const tokenTypesSummary = Array.from(tokenTypesCounts.entries())
      .map(([type, count]) => `${count} ${type}${count > 1 ? "s" : ""}`)
      .join(", ");

    const baseSummary = `scanned ${result.scannedFiles} files, found ${result.findings.length} potential token(s) in ${filesWithTokens.size} file(s)`;
    const summaryWithTypes = tokenTypesSummary
      ? `${baseSummary} (${tokenTypesSummary})`
      : baseSummary;

    logger.info(`Artifact check complete: ${summaryWithTypes}`);

    if (result.findings.length > 0) {
      const fileList = Array.from(filesWithTokens).join(", ");
      throw new Error(
        `Found ${result.findings.length} potential GitHub token(s) (${tokenTypesSummary}) in debug artifacts at: ${fileList}. This is a best-effort check for testing purposes only.`,
      );
    }
  } finally {
    // Clean up temporary directory
    try {
      fs.rmSync(tempScanDir, { recursive: true, force: true });
    } catch (e) {
      logger.debug(
        `Could not clean up temporary scan directory: ${getErrorMessage(e)}`,
      );
    }
  }
}
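A minimal usage sketch of the exported entry point, assuming a caller that already holds a Logger (as uploadDebugArtifacts does in the next hunk); the scanned path is illustrative:

import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";

async function demo(): Promise<void> {
  const logger = getRunnerLogger(true);
  try {
    // Accepts files, directories, and archives; throws if any token-shaped
    // string is found, and returns normally otherwise.
    await scanArtifactsForTokens(["./debug-artifacts"], logger); // illustrative path
  } catch (e) {
    // The error message names the token types and the files containing them.
    console.error(e instanceof Error ? e.message : String(e));
  }
}

void demo();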
src/debug-artifacts.ts (changed)
@@ -8,6 +8,7 @@ import archiver from "archiver";
 
 import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
 import { dbIsFinalized } from "./analyze";
+import { scanArtifactsForTokens } from "./artifact-scanner";
 import { type CodeQL } from "./codeql";
 import { Config } from "./config-utils";
 import { EnvVar } from "./environment";
@@ -23,6 +24,7 @@ import {
   getCodeQLDatabasePath,
   getErrorMessage,
   GitHubVariant,
+  isInTestMode,
   listFolder,
 } from "./util";
 
@@ -269,6 +271,14 @@ export async function uploadDebugArtifacts(
     return "upload-not-supported";
   }
 
+  // When running in test mode, perform a best effort scan of the debug artifacts. The artifact
+  // scanner is basic and not reliable or fast enough for production use, but it can help catch
+  // some issues early.
+  if (isInTestMode()) {
+    await scanArtifactsForTokens(toUpload, logger);
+    core.exportVariable("CODEQL_ACTION_ARTIFACT_SCAN_FINISHED", "true");
+  }
+
   let suffix = "";
   const matrix = getOptionalInput("matrix");
   if (matrix) {
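The isInTestMode() guard keeps the scan out of production runs, matching the comment that the scanner is best-effort only. Test mode in this repository is signalled through an environment variable; the following is a sketch of the gating logic under the assumption that CODEQL_ACTION_TEST_MODE is that flag (the diff imports the real check from ./util):

// Assumed flag name; the actual implementation lives in src/util.ts.
function isInTestMode(): boolean {
  return process.env.CODEQL_ACTION_TEST_MODE === "true";
}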
src/testdata/debug-artifacts-with-fake-token.zip (vendored, new binary file; binary file not shown)
src/testing-utils.ts (changed)
@@ -152,27 +152,38 @@ export interface LoggedMessage {
   message: string | Error;
 }
 
-export function getRecordingLogger(messages: LoggedMessage[]): Logger {
+export function getRecordingLogger(
+  messages: LoggedMessage[],
+  { logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
+): Logger {
   return {
     debug: (message: string) => {
       messages.push({ type: "debug", message });
-      // eslint-disable-next-line no-console
-      console.debug(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.debug(message);
+      }
     },
     info: (message: string) => {
       messages.push({ type: "info", message });
-      // eslint-disable-next-line no-console
-      console.info(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.info(message);
+      }
     },
     warning: (message: string | Error) => {
       messages.push({ type: "warning", message });
-      // eslint-disable-next-line no-console
-      console.warn(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.warn(message);
+      }
     },
     error: (message: string | Error) => {
       messages.push({ type: "error", message });
-      // eslint-disable-next-line no-console
-      console.error(message);
+      if (logToConsole) {
+        // eslint-disable-next-line no-console
+        console.error(message);
+      }
     },
     isDebug: () => true,
     startGroup: () => undefined,
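The new options parameter defaults to { logToConsole: true }, so existing call sites keep their current behaviour. Tests that assert on recorded output can now silence the console echo, as the artifact-scanner test above does:

import { getRecordingLogger, LoggedMessage } from "./testing-utils";

const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages, { logToConsole: false });

logger.info("Extracting gz file: example.trap.gz"); // recorded, not printed
console.log(messages);
// => [{ type: "info", message: "Extracting gz file: example.trap.gz" }]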