Mirror of https://github.com/github/codeql-action.git, synced 2025-12-06 07:48:17 +08:00
Merge pull request #3252 from github/update-v4.31.1-777daa0c7
Merge main into releases/v4
.github/workflows/__bundle-zstd.yml (generated, vendored, 2 changes)
@@ -79,7 +79,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: ${{ matrix.os }}-zstd-bundle.sarif
           path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__config-export.yml (generated, vendored, 2 changes)
@@ -67,7 +67,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
          name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__diagnostics-export.yml (generated, vendored, 2 changes)
@@ -78,7 +78,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__export-file-baseline-information.yml (generated, vendored, 2 changes)
@@ -85,7 +85,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__job-run-uuid-sarif.yml (generated, vendored, 2 changes)
@@ -64,7 +64,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__quality-queries.yml (generated, vendored, 6 changes)
@@ -83,7 +83,7 @@ jobs:
           post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -91,14 +91,14 @@ jobs:
           retention-days: 7
       - name: Upload quality SARIF
         if: contains(matrix.analysis-kinds, 'code-quality')
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
       - name: Upload post-processed SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
         with:
           name: |
             post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
.github/workflows/__rubocop-multi-language.yml (generated, vendored, 2 changes)
@@ -56,7 +56,7 @@ jobs:
           use-all-platform-bundle: 'false'
           setup-kotlin: 'true'
       - name: Set up Ruby
-        uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
+        uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
        with:
          ruby-version: 2.6
      - name: Install Code Scanning integration
@@ -2,6 +2,10 @@

 See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

+## 4.31.1 - 30 Oct 2025
+
+- The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.
+
 ## 4.31.0 - 24 Oct 2025

 - Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
@@ -32,14 +32,10 @@ inputs:
       and 13GB for macOS).
     required: false
   add-snippets:
-    description: Specify whether or not to add code snippets to the output sarif file.
+    description: Does not have any effect.
     required: false
-    default: "false"
     deprecationMessage: >-
-      The input "add-snippets" is deprecated and will be removed on the first release in August 2025.
-      When this input is set to true it is expected to add code snippets with an alert to the SARIF file.
-      However, since Code Scanning ignores code snippets provided as part of a SARIF file this is currently
-      a no operation. No alternative is available.
+      The input "add-snippets" has been removed and no longer has any effect.
   skip-queries:
     description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
     required: false
lib/analyze-action-post.js (generated): 7820 changes; file diff suppressed because one or more lines are too long
lib/analyze-action.js (generated): 9209 changes; file diff suppressed because it is too large
lib/autobuild-action.js (generated): 1368 changes; file diff suppressed because it is too large
lib/init-action-post.js (generated): 16729 changes; file diff suppressed because one or more lines are too long
lib/init-action.js (generated): 8938 changes; file diff suppressed because it is too large
lib/resolve-environment-action.js (generated): 1376 changes; file diff suppressed because it is too large
lib/setup-codeql-action.js (generated): 8686 changes; file diff suppressed because it is too large
lib/start-proxy-action-post.js (generated): 7759 changes; file diff suppressed because one or more lines are too long
lib/start-proxy-action.js (generated): 1863 changes; file diff suppressed because it is too large
lib/upload-lib.js (generated): 8480 changes; file diff suppressed because it is too large
lib/upload-sarif-action-post.js (generated): 7751 changes; file diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated): 8766 changes; file diff suppressed because it is too large
package-lock.json (generated): 701 changes; file diff suppressed because it is too large
package.json (12 changes)
@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "4.31.0",
+  "version": "4.31.1",
   "private": true,
   "description": "CodeQL action",
   "scripts": {
@@ -24,7 +24,7 @@
   },
   "license": "MIT",
   "dependencies": {
-    "@actions/artifact": "^2.3.1",
+    "@actions/artifact": "^4.0.0",
     "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
     "@actions/cache": "^4.1.0",
     "@actions/core": "^1.11.1",
@@ -38,9 +38,7 @@
     "@octokit/request-error": "^7.0.1",
     "@schemastore/package": "0.0.10",
     "archiver": "^7.0.1",
     "check-disk-space": "^3.4.0",
-    "console-log-level": "^1.4.1",
-    "del": "^8.0.0",
     "fast-deep-equal": "^3.1.3",
     "follow-redirects": "^1.15.11",
     "get-folder-size": "^5.0.0",
@@ -58,8 +56,8 @@
     "@eslint/eslintrc": "^3.3.1",
     "@eslint/js": "^9.38.0",
     "@microsoft/eslint-formatter-sarif": "^3.1.0",
-    "@octokit/types": "^15.0.0",
-    "@types/archiver": "^6.0.3",
+    "@octokit/types": "^15.0.1",
+    "@types/archiver": "^6.0.4",
     "@types/console-log-level": "^1.4.5",
     "@types/follow-redirects": "^1.14.4",
     "@types/js-yaml": "^4.0.9",
@@ -67,7 +65,7 @@
     "@types/node-forge": "^1.3.14",
     "@types/semver": "^7.7.1",
     "@types/sinon": "^17.0.4",
-    "@typescript-eslint/eslint-plugin": "^8.46.1",
+    "@typescript-eslint/eslint-plugin": "^8.46.2",
     "@typescript-eslint/parser": "^8.41.0",
     "ava": "^6.4.1",
     "esbuild": "^0.25.11",
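One note on the dependency list above: the `npm:` alias in `"@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2"` is what lets the old and new artifact clients coexist after the v2 to v4 major bump, since npm installs `@actions/artifact@^1.1.2` under the distinct local name `@actions/artifact-legacy`. The two imports below, taken from elsewhere in this diff, show how the alias is consumed:

// Both major versions of @actions/artifact resolve independently.
import * as artifact from "@actions/artifact";              // ^4.0.0
import * as artifactLegacy from "@actions/artifact-legacy"; // npm alias for @actions/artifact@^1.1.2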
@@ -27,7 +27,7 @@ steps:
       output: ${{ runner.temp }}/results
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: ${{ matrix.os }}-zstd-bundle.sarif
       path: ${{ runner.temp }}/results/javascript.sarif

@@ -12,7 +12,7 @@ steps:
       output: "${{ runner.temp }}/results"
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
       path: "${{ runner.temp }}/results/javascript.sarif"

@@ -25,7 +25,7 @@ steps:
       output: "${{ runner.temp }}/results"
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
       path: "${{ runner.temp }}/results/javascript.sarif"

@@ -17,7 +17,7 @@ steps:
     with:
       output: "${{ runner.temp }}/results"
   - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -11,7 +11,7 @@ steps:
     with:
       output: "${{ runner.temp }}/results"
   - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -39,7 +39,7 @@ steps:
       post-processed-sarif-path: "${{ runner.temp }}/post-processed"
   - name: Upload security SARIF
     if: contains(matrix.analysis-kinds, 'code-scanning')
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: |
         quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -47,14 +47,14 @@ steps:
       retention-days: 7
   - name: Upload quality SARIF
     if: contains(matrix.analysis-kinds, 'code-quality')
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: |
         quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
       path: "${{ runner.temp }}/results/javascript.quality.sarif"
       retention-days: 7
   - name: Upload post-processed SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
     with:
       name: |
         post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
 versions: ["default"]
 steps:
   - name: Set up Ruby
-    uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
+    uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
     with:
       ruby-version: 2.6
   - name: Install Code Scanning integration
@@ -9,9 +9,15 @@ if [ "$GITHUB_ACTIONS" = "true" ]; then
 fi

 # Check if npm install is likely needed before proceeding
-if [ ! -d node_modules ] || [ package-lock.json -nt node_modules/.package-lock.json ]; then
-  echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated..."
+if [ ! -d node_modules ]; then
+  echo "Running 'npm install' because 'node_modules' directory is missing."
   npm install
+elif [ package.json -nt package-lock.json ]; then
+  echo "Running 'npm install' because 'package-lock.json' appears to be outdated."
+  npm install
+elif [ package-lock.json -nt node_modules/.package-lock.json ]; then
+  echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated."
+  npm install
 else
-  echo "Skipping 'npm install' because 'node_modules/.package-lock.json' appears to be up-to-date."
+  echo "Skipping 'npm install' because everything appears to be up-to-date."
 fi
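For readers less familiar with the shell `-nt` (newer-than) test used above: it compares file modification times, and it is also true when the right-hand file does not exist, so each branch re-runs `npm install` only when a source file is newer than the artifact derived from it. A minimal TypeScript sketch of the same freshness check, written against the repository-root paths from the script:

import * as fs from "fs";

// True when `left` exists and is newer than `right`, mirroring the shell
// `[ left -nt right ]` test (which is also true when `right` is missing).
function newerThan(left: string, right: string): boolean {
  if (!fs.existsSync(left)) return false;
  if (!fs.existsSync(right)) return true;
  return fs.statSync(left).mtimeMs > fs.statSync(right).mtimeMs;
}

const needsInstall =
  !fs.existsSync("node_modules") ||
  newerThan("package.json", "package-lock.json") ||
  newerThan("package-lock.json", "node_modules/.package-lock.json");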
@@ -78,7 +78,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
   t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
   t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
   t.assert(runQueriesStub.calledOnce);
-  t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
+  t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
   t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
 });
 });

@@ -76,7 +76,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
   t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
   t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
   t.assert(runQueriesStub.calledOnce);
-  t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
+  t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
   t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
 });
 });
@@ -324,10 +324,16 @@ async function run() {
     );

     if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
+      // Warn if the removed `add-snippets` input is used.
+      if (actionsUtil.getOptionalInput("add-snippets") !== undefined) {
+        logger.warning(
+          "The `add-snippets` input has been removed and no longer has any effect.",
+        );
+      }
+
       runStats = await runQueries(
         outputDir,
         memory,
-        util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
         threads,
         diffRangePackDir,
         actionsUtil.getOptionalInput("category"),
@@ -4,10 +4,8 @@ import * as path from "path";
 import test from "ava";
 import * as sinon from "sinon";

-import * as actionsUtil from "./actions-util";
 import { CodeQuality, CodeScanning } from "./analyses";
 import {
-  exportedForTesting,
   runQueries,
   defaultSuites,
   resolveQuerySuiteAlias,
@@ -39,7 +37,6 @@ test("status report fields", async (t) => {
   setupActionsVars(tmpDir, tmpDir);

   const memoryFlag = "";
-  const addSnippetsFlag = "";
   const threadsFlag = "";
   sinon.stub(uploadLib, "validateSarifFileSchema");

@@ -105,7 +102,6 @@ test("status report fields", async (t) => {
   const statusReport = await runQueries(
     tmpDir,
     memoryFlag,
-    addSnippetsFlag,
     threadsFlag,
     undefined,
     undefined,
@@ -131,204 +127,6 @@ test("status report fields", async (t) => {
   });
 });

-function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
-  [... the remaining deleted lines of this 198-line removal are elided here: the runGetDiffRanges helper and the ten getDiffRanges tests are removed from this file and re-added verbatim to the diff-informed-analysis-utils tests later in this diff ...]

 test("resolveQuerySuiteAlias", (t) => {
   // default query suite names should resolve to something language-specific ending in `.qls`.
   for (const suite of defaultSuites) {
src/analyze.ts (197 changes)
@@ -3,16 +3,10 @@ import * as path from "path";
 import { performance } from "perf_hooks";

 import * as io from "@actions/io";
-import * as del from "del";
 import * as yaml from "js-yaml";

-import {
-  getRequiredInput,
-  getTemporaryDirectory,
-  PullRequestBranches,
-} from "./actions-util";
+import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
 import * as analyses from "./analyses";
-import { getApiClient } from "./api-client";
 import { setupCppAutobuild } from "./autobuild";
 import { type CodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
@@ -21,13 +15,13 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics";
 import {
   DiffThunkRange,
   writeDiffRangesJsonFile,
+  getPullRequestEditedDiffRanges,
 } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { FeatureEnablement, Feature } from "./feature-flags";
 import { KnownLanguage, Language } from "./languages";
 import { Logger, withGroupAsync } from "./logging";
 import { OverlayDatabaseMode } from "./overlay-database-utils";
-import { getRepositoryNwoFromEnv } from "./repository";
 import { DatabaseCreationTimings, EventReport } from "./status-report";
 import { endTracingForCluster } from "./tracer-config";
 import * as util from "./util";
@@ -313,185 +307,6 @@ export async function setupDiffInformedQueryRun(
   );
 }

-/**
- * Return the file line ranges that were added or modified in the pull request.
-  [... the remaining deleted lines of this 185-line removal are elided here: getPullRequestEditedDiffRanges, the FileDiff interface, getFileDiffsWithBasehead, and getDiffRanges are removed from this file and re-added verbatim to diff-informed-analysis-utils.ts later in this diff ...]

 /**
  * Create an extension pack in the temporary directory that contains the file
  * line ranges that were added or modified in the pull request.
@@ -621,7 +436,6 @@ export function addSarifExtension(
 export async function runQueries(
   sarifFolder: string,
   memoryFlag: string,
-  addSnippetsFlag: string,
   threadsFlag: string,
   diffRangePackDir: string | undefined,
   automationDetailsId: string | undefined,
@@ -811,7 +625,6 @@ export async function runQueries(
       databasePath,
       queries,
       sarifFile,
-      addSnippetsFlag,
       threadsFlag,
       enableDebugLogging ? "-vv" : "-v",
       sarifRunPropertyFlag,
@@ -855,7 +668,7 @@ export async function runFinalize(
   logger: Logger,
 ): Promise<DatabaseCreationTimings> {
   try {
-    await del.deleteAsync(outputDir, { force: true });
+    await fs.promises.rm(outputDir, { force: true, recursive: true });
   } catch (error: any) {
     if (error?.code !== "ENOENT") {
       throw error;
@@ -922,7 +735,3 @@ export async function warnIfGoInstalledAfterInit(
     }
   }
 }
-
-export const exportedForTesting = {
-  getDiffRanges,
-};
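Throughout this PR, calls to the `del` package are replaced with Node's built-in `fs.promises.rm`. The options are not one-for-one: in `del`, `force: true` permits deleting paths outside the current working directory, whereas in `fs.promises.rm`, `force: true` makes a missing path a no-op (so the surrounding `ENOENT` guard in `runFinalize` becomes a belt-and-braces check) and `recursive: true` is required to remove directories. A minimal sketch of the replacement pattern:

import * as fs from "fs";

// Before (with the del package):
//   await del.deleteAsync(outputDir, { force: true });
// After (Node built-in): recursive is needed for directories, and force
// makes a missing path a no-op instead of an ENOENT error.
async function removeOutputDir(outputDir: string): Promise<void> {
  await fs.promises.rm(outputDir, { force: true, recursive: true });
}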
@@ -169,4 +169,39 @@ test("wrapApiConfigurationError correctly wraps specific configuration errors",
     res,
     new util.ConfigurationError("Resource not accessible by integration"),
   );
+
+  // Enablement errors.
+  const codeSecurityNotEnabledError = new util.HTTPError(
+    "Code Security must be enabled for this repository to use code scanning",
+    403,
+  );
+  res = api.wrapApiConfigurationError(codeSecurityNotEnabledError);
+  t.deepEqual(
+    res,
+    new util.ConfigurationError(
+      api.getFeatureEnablementError(codeSecurityNotEnabledError.message),
+    ),
+  );
+  const advancedSecurityNotEnabledError = new util.HTTPError(
+    "Advanced Security must be enabled for this repository to use code scanning",
+    403,
+  );
+  res = api.wrapApiConfigurationError(advancedSecurityNotEnabledError);
+  t.deepEqual(
+    res,
+    new util.ConfigurationError(
+      api.getFeatureEnablementError(advancedSecurityNotEnabledError.message),
+    ),
+  );
+  const codeScanningNotEnabledError = new util.HTTPError(
+    "Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
+    403,
+  );
+  res = api.wrapApiConfigurationError(codeScanningNotEnabledError);
+  t.deepEqual(
+    res,
+    new util.ConfigurationError(
+      api.getFeatureEnablementError(codeScanningNotEnabledError.message),
+    ),
+  );
 });
@@ -1,7 +1,6 @@
 import * as core from "@actions/core";
 import * as githubUtils from "@actions/github/lib/utils";
 import * as retry from "@octokit/plugin-retry";
-import consoleLogLevel from "console-log-level";

 import { getActionVersion, getRequiredInput } from "./actions-util";
 import { Logger } from "./logging";
@@ -50,7 +49,12 @@ function createApiClientWithDetails(
     githubUtils.getOctokitOptions(auth, {
       baseUrl: apiDetails.apiURL,
       userAgent: `CodeQL-Action/${getActionVersion()}`,
-      log: consoleLogLevel({ level: "debug" }),
+      log: {
+        debug: core.debug,
+        info: core.info,
+        warn: core.warning,
+        error: core.error,
+      },
     }),
   );
 }
@@ -279,6 +283,20 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
   });
 }

+function isEnablementError(msg: string) {
+  return [
+    /Code Security must be enabled/,
+    /Advanced Security must be enabled/,
+    /Code Scanning is not enabled/,
+  ].some((pattern) => pattern.test(msg));
+}
+
+// TODO: Move to `error-messages.ts` after refactoring import order to avoid cycle
+// since `error-messages.ts` currently depends on this file.
+export function getFeatureEnablementError(message: string): string {
+  return `Please verify that the necessary features are enabled: ${message}`;
+}
+
 export function wrapApiConfigurationError(e: unknown) {
   const httpError = asHTTPError(e);
   if (httpError !== undefined) {
@@ -300,6 +318,11 @@ export function wrapApiConfigurationError(e: unknown) {
       "Please check that your token is valid and has the required permissions: contents: read, security-events: write",
     );
   }
+  if (httpError.status === 403 && isEnablementError(httpError.message)) {
+    return new ConfigurationError(
+      getFeatureEnablementError(httpError.message),
+    );
+  }
   if (httpError.status === 429) {
     return new ConfigurationError("API rate limit exceeded");
   }
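As a worked example of the new enablement path (mirroring the test added earlier in this diff): a 403 whose message matches one of the three patterns comes back as a ConfigurationError carrying the enablement prefix.

// Sketch using HTTPError, wrapApiConfigurationError and
// getFeatureEnablementError as exercised by the test above.
const err = new util.HTTPError(
  "Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
  403,
);
const wrapped = api.wrapApiConfigurationError(err);
// wrapped is a ConfigurationError whose message is
// "Please verify that the necessary features are enabled: " + err.message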
@@ -5,7 +5,6 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as io from "@actions/io";
 import * as toolcache from "@actions/tool-cache";
 import test, { ExecutionContext } from "ava";
-import * as del from "del";
 import * as yaml from "js-yaml";
 import nock from "nock";
 import * as sinon from "sinon";
@@ -557,7 +556,7 @@ const injectedConfigMacro = test.macro({
       const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
       t.deepEqual(augmentedConfig, expectedConfig);

-      await del.deleteAsync(configFile, { force: true });
+      await fs.promises.rm(configFile, { force: true });
     });
   },
@@ -1046,7 +1045,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP
   );
   t.truthy(configArg, "Should have injected a codescanning config");
   const configFile = configArg!.split("=")[1];
-  await del.deleteAsync(configFile, { force: true });
+  await fs.promises.rm(configFile, { force: true });
 });

 export function stubToolRunnerConstructor(
@@ -167,7 +167,6 @@ export interface CodeQL {
     databasePath: string,
     querySuitePaths: string[] | undefined,
     sarifFile: string,
-    addSnippetsFlag: string,
     threadsFlag: string,
     verbosityFlag: string | undefined,
     sarifRunPropertyFlag: string | undefined,
@@ -817,7 +816,6 @@ export async function getCodeQLForCmd(
     databasePath: string,
     querySuitePaths: string[] | undefined,
     sarifFile: string,
-    addSnippetsFlag: string,
     threadsFlag: string,
     verbosityFlag: string,
     sarifRunPropertyFlag: string | undefined,
@@ -836,7 +834,6 @@ export async function getCodeQLForCmd(
     "--format=sarif-latest",
     verbosityFlag,
     `--output=${sarifFile}`,
-    addSnippetsFlag,
     "--print-diagnostics-summary",
     "--print-metrics-summary",
     "--sarif-add-baseline-file-info",
@@ -5,7 +5,6 @@ import * as artifact from "@actions/artifact";
 import * as artifactLegacy from "@actions/artifact-legacy";
 import * as core from "@actions/core";
 import archiver from "archiver";
-import * as del from "del";

 import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
 import { dbIsFinalized } from "./analyze";
@@ -345,7 +344,7 @@ async function createPartialDatabaseBundle(
   );
   // See `bundleDb` for explanation behind deleting existing db bundle.
   if (fs.existsSync(databaseBundlePath)) {
-    await del.deleteAsync(databaseBundlePath, { force: true });
+    await fs.promises.rm(databaseBundlePath, { force: true });
   }
   const output = fs.createWriteStream(databaseBundlePath);
   const zip = archiver("zip");
@@ -4,7 +4,10 @@ import * as sinon from "sinon";
 import * as actionsUtil from "./actions-util";
 import type { PullRequestBranches } from "./actions-util";
 import * as apiClient from "./api-client";
-import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
+import {
+  shouldPerformDiffInformedAnalysis,
+  exportedForTesting,
+} from "./diff-informed-analysis-utils";
 import { Feature, Features } from "./feature-flags";
 import { getRunnerLogger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
@@ -183,3 +186,201 @@ test(
   },
   false,
 );
+
+function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
+  sinon
+    .stub(actionsUtil, "getRequiredInput")
+    .withArgs("checkout_path")
+    .returns("/checkout/path");
+  return exportedForTesting.getDiffRanges(
+    {
+      filename: "test.txt",
+      changes,
+      patch: patch?.join("\n"),
+    },
+    getRunnerLogger(true),
+  );
+}
+
+test("getDiffRanges: file unchanged", async (t) => {
+  const diffRanges = runGetDiffRanges(0, undefined);
+  t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: file diff too large", async (t) => {
+  const diffRanges = runGetDiffRanges(1000000, undefined);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 0,
+      endLine: 0,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with single addition range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,6 +50,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 54,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with single deletion range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,8 +50,6 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    "-2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: diff thunk with single update range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,7 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 53,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with addition ranges", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,9 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    " c",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 53,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 55,
+      endLine: 55,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,7 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    " d",
+    "-2",
+    "+3",
+    " e",
+    " f",
+    "+4",
+    "+5",
+    " g",
+    " h",
+    " i",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 54,
+      endLine: 54,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 57,
+      endLine: 58,
+    },
+  ]);
+});
+
+test("getDiffRanges: multiple diff thunks", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,6 +50,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+    "@@ -130,6 +150,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 54,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 153,
+      endLine: 154,
+    },
+  ]);
+});
+
+test("getDiffRanges: no diff context lines", async (t) => {
+  const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 50,
+      endLine: 51,
+    },
+  ]);
+});
+
+test("getDiffRanges: malformed thunk header", async (t) => {
+  const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
+  t.deepEqual(diffRanges, undefined);
+});
@@ -3,12 +3,25 @@ import * as path from "path";

 import * as actionsUtil from "./actions-util";
 import type { PullRequestBranches } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import { getApiClient, getGitHubVersion } from "./api-client";
 import type { CodeQL } from "./codeql";
 import { Feature, FeatureEnablement } from "./feature-flags";
 import { Logger } from "./logging";
+import { getRepositoryNwoFromEnv } from "./repository";
 import { GitHubVariant, satisfiesGHESVersion } from "./util";

+/**
+ * This interface is an abbreviated version of the file diff object returned by
+ * the GitHub API.
+ */
+interface FileDiff {
+  filename: string;
+  changes: number;
+  // A patch may be absent if the file is binary, if the file diff is too large,
+  // or if the file is unchanged.
+  patch?: string | undefined;
+}
+
 /**
  * Check if the action should perform diff-informed analysis.
  */
@@ -93,3 +106,174 @@ export function readDiffRangesJsonFile(
   );
   return JSON.parse(jsonContents) as DiffThunkRange[];
 }
+
+/**
+ * Return the file line ranges that were added or modified in the pull request.
+ *
+ * @param branches The base and head branches of the pull request.
+ * @param logger
+ * @returns An array of tuples, where each tuple contains the absolute path of a
+ * file, the start line and the end line (both 1-based and inclusive) of an
+ * added or modified range in that file. Returns `undefined` if the action was
+ * not triggered by a pull request or if there was an error.
+ */
+export async function getPullRequestEditedDiffRanges(
+  branches: PullRequestBranches,
+  logger: Logger,
+): Promise<DiffThunkRange[] | undefined> {
+  const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
+  if (fileDiffs === undefined) {
+    return undefined;
+  }
+  if (fileDiffs.length >= 300) {
+    // The "compare two commits" API returns a maximum of 300 changed files. If
+    // we see that many changed files, it is possible that there could be more,
+    // with the rest being truncated. In this case, we should not attempt to
+    // compute the diff ranges, as the result would be incomplete.
+    logger.warning(
+      `Cannot retrieve the full diff because there are too many ` +
+        `(${fileDiffs.length}) changed files in the pull request.`,
+    );
+    return undefined;
+  }
+  const results: DiffThunkRange[] = [];
+  for (const filediff of fileDiffs) {
+    const diffRanges = getDiffRanges(filediff, logger);
+    if (diffRanges === undefined) {
+      return undefined;
+    }
+    results.push(...diffRanges);
+  }
+  return results;
+}
+
+async function getFileDiffsWithBasehead(
+  branches: PullRequestBranches,
+  logger: Logger,
+): Promise<FileDiff[] | undefined> {
+  // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
+  // to GITHUB_REPOSITORY.
+  const repositoryNwo = getRepositoryNwoFromEnv(
+    "CODE_SCANNING_REPOSITORY",
+    "GITHUB_REPOSITORY",
+  );
+  const basehead = `${branches.base}...${branches.head}`;
+  try {
+    const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
+      {
+        owner: repositoryNwo.owner,
+        repo: repositoryNwo.repo,
+        basehead,
+        per_page: 1,
+      },
+    );
+    logger.debug(
+      `Response from compareCommitsWithBasehead(${basehead}):` +
+        `\n${JSON.stringify(response, null, 2)}`,
+    );
+    return response.data.files;
+  } catch (error: any) {
+    if (error.status) {
+      logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
+      logger.debug(
+        `Error running compareCommitsWithBasehead(${basehead}):` +
+          `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
+          `\nError Response: ${JSON.stringify(error.response, null, 2)}`,
+      );
+      return undefined;
+    } else {
+      throw error;
+    }
+  }
+}
+
+function getDiffRanges(
+  fileDiff: FileDiff,
+  logger: Logger,
+): DiffThunkRange[] | undefined {
+  // Diff-informed queries expect the file path to be absolute. CodeQL always
+  // uses forward slashes as the path separator, so on Windows we need to
+  // replace any backslashes with forward slashes.
+  const filename = path
+    .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
+    .replaceAll(path.sep, "/");
+
+  if (fileDiff.patch === undefined) {
+    if (fileDiff.changes === 0) {
+      // There are situations where a changed file legitimately has no diff.
+      // For example, the file may be a binary file, or that the file may have
+      // been renamed with no changes to its contents. In these cases, the
+      // file would be reported as having 0 changes, and we can return an empty
+      // array to indicate no diff range in this file.
+      return [];
+    }
+    // If a file is reported to have nonzero changes but no patch, that may be
+    // due to the file diff being too large. In this case, we should fall back
+    // to a special diff range that covers the entire file.
+    return [
+      {
+        path: filename,
+        startLine: 0,
+        endLine: 0,
+      },
+    ];
+  }
+
+  // The 1-based file line number of the current line
+  let currentLine = 0;
+  // The 1-based file line number that starts the current range of added lines
+  let additionRangeStartLine: number | undefined = undefined;
+  const diffRanges: DiffThunkRange[] = [];
+
+  const diffLines = fileDiff.patch.split("\n");
+  // Adding a fake context line at the end ensures that the following loop will
+  // always terminate the last range of added lines.
+  diffLines.push(" ");
+
+  for (const diffLine of diffLines) {
+    if (diffLine.startsWith("-")) {
+      // Ignore deletions completely -- we do not even want to consider them when
+      // calculating consecutive ranges of added lines.
+      continue;
+    }
+    if (diffLine.startsWith("+")) {
+      if (additionRangeStartLine === undefined) {
+        additionRangeStartLine = currentLine;
+      }
+      currentLine++;
+      continue;
+    }
+    if (additionRangeStartLine !== undefined) {
+      // Any line that does not start with a "+" or "-" terminates the current
+      // range of added lines.
+      diffRanges.push({
+        path: filename,
+        startLine: additionRangeStartLine,
+        endLine: currentLine - 1,
+      });
+      additionRangeStartLine = undefined;
+    }
+    if (diffLine.startsWith("@@ ")) {
+      // A new hunk header line resets the current line number.
+      const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
+      if (match === null) {
+        logger.warning(
+          `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
+        );
+        return undefined;
+      }
+      currentLine = parseInt(match[1], 10);
+      continue;
+    }
+    if (diffLine.startsWith(" ")) {
+      // An unchanged context line advances the current line number.
+      currentLine++;
+      continue;
+    }
+  }
+  return diffRanges;
+}
+
+export const exportedForTesting = {
+  getDiffRanges,
+};
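The hunk-header regex and line arithmetic above are easiest to see on a concrete patch. In the test fixture "@@ -30,6 +50,8 @@", the capture group yields 50, the three context lines advance the counter to 53, and the two "+" lines therefore produce the range 53..54, exactly what the "single addition range" test expects. A minimal standalone sketch:

// Standalone sketch of the hunk-header parsing in getDiffRanges.
const hunkHeader = "@@ -30,6 +50,8 @@";
const match = hunkHeader.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
// match[1] is the first line of the hunk in the *new* version of the file.
let currentLine = parseInt(match![1], 10); // 50
currentLine += 3; // three context lines (" a", " b", " c") advance to 53
// The added lines "+1" and "+2" then form { startLine: 53, endLine: 54 },
// matching the "single addition range" test above.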
@@ -137,4 +137,10 @@ export enum EnvVar {
    * This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
    */
   SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",
+
+  /**
+   * Whether to skip workflow validation. Intended for internal use, where we know that
+   * the workflow is valid and validation is not necessary.
+   */
+  SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
 }
@@ -86,7 +86,7 @@ import {
   getErrorMessage,
   BuildMode,
 } from "./util";
-import { validateWorkflow } from "./workflow";
+import { checkWorkflow } from "./workflow";

 /**
  * Sends a status report indicating that the `init` Action is starting.
@@ -288,16 +288,9 @@ async function run() {
     toolsSource = initCodeQLResult.toolsSource;
     zstdAvailability = initCodeQLResult.zstdAvailability;

-    core.startGroup("Validating workflow");
-    const validateWorkflowResult = await validateWorkflow(codeql, logger);
-    if (validateWorkflowResult === undefined) {
-      logger.info("Detected no issues with the code scanning workflow.");
-    } else {
-      logger.warning(
-        `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
-      );
-    }
-    core.endGroup();
+    // Check the workflow for problems. If there are any problems, they are reported
+    // to the workflow log. No exceptions are thrown.
+    await checkWorkflow(logger, codeql);

     // Set CODEQL_ENABLE_EXPERIMENTAL_FEATURES for Rust if between 2.19.3 (included) and 2.22.1 (excluded)
     // We need to set this environment variable before initializing the config, otherwise Rust
@@ -13,7 +13,15 @@ export interface Logger {
 }

 export function getActionsLogger(): Logger {
-  return core;
+  return {
+    debug: core.debug,
+    info: core.info,
+    warning: core.warning,
+    error: core.error,
+    isDebug: core.isDebug,
+    startGroup: core.startGroup,
+    endGroup: core.endGroup,
+  };
 }

 export function getRunnerLogger(debugMode: boolean): Logger {
@@ -11,6 +11,8 @@ import * as gitUtils from "./git-utils";
 import { getRunnerLogger } from "./logging";
 import {
   downloadOverlayBaseDatabaseFromCache,
+  getCacheRestoreKeyPrefix,
+  getCacheSaveKey,
   OverlayDatabaseMode,
   writeBaseDatabaseOidsFile,
   writeOverlayChangesFile,
@@ -261,3 +263,48 @@ test(
   },
   false,
 );
+
+test("overlay-base database cache keys remain stable", async (t) => {
+  const logger = getRunnerLogger(true);
+  const config = createTestConfig({ languages: ["python", "javascript"] });
+  const codeQlVersion = "2.23.0";
+  const commitOid = "abc123def456";
+
+  sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
+  sinon.stub(gitUtils, "getCommitOid").resolves(commitOid);
+  sinon.stub(actionsUtil, "getWorkflowRunID").returns(12345);
+  sinon.stub(actionsUtil, "getWorkflowRunAttempt").returns(1);
+
+  const saveKey = await getCacheSaveKey(
+    config,
+    codeQlVersion,
+    "checkout-path",
+    logger,
+  );
+  const expectedSaveKey =
+    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1";
+  t.is(
+    saveKey,
+    expectedSaveKey,
+    "Cache save key changed unexpectedly. " +
+      "This may indicate breaking changes in the cache key generation logic.",
+  );
+
+  const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
+    config,
+    codeQlVersion,
+  );
+  const expectedRestoreKeyPrefix =
+    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
+  t.is(
+    restoreKeyPrefix,
+    expectedRestoreKeyPrefix,
+    "Cache restore key prefix changed unexpectedly. " +
+      "This may indicate breaking changes in the cache key generation logic.",
+  );
+
+  t.true(
+    saveKey.startsWith(restoreKeyPrefix),
+    `Expected save key "${saveKey}" to start with restore key prefix "${restoreKeyPrefix}"`,
+  );
+});
@@ -4,13 +4,19 @@ import * as path from "path";

 import * as actionsCache from "@actions/cache";

-import { getRequiredInput, getTemporaryDirectory } from "./actions-util";
+import {
+  getRequiredInput,
+  getTemporaryDirectory,
+  getWorkflowRunAttempt,
+  getWorkflowRunID,
+} from "./actions-util";
 import { getAutomationID } from "./api-client";
 import { type CodeQL } from "./codeql";
 import { type Config } from "./config-utils";
 import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
 import { Logger, withGroupAsync } from "./logging";
 import {
+  getErrorMessage,
   isInTestMode,
   tryGetFolderBytes,
   waitForResultWithTimeLimit,
@@ -266,6 +272,7 @@ export async function uploadOverlayBaseDatabaseToCache(
     config,
     codeQlVersion,
     checkoutPath,
+    logger,
   );
   logger.info(
     `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
@@ -443,17 +450,28 @@ export async function downloadOverlayBaseDatabaseFromCache(
 * The key consists of the restore key prefix (which does not include the
 * commit SHA) and the commit SHA of the current checkout.
 */
-async function getCacheSaveKey(
+export async function getCacheSaveKey(
   config: Config,
   codeQlVersion: string,
   checkoutPath: string,
+  logger: Logger,
 ): Promise<string> {
+  let runId = 1;
+  let attemptId = 1;
+  try {
+    runId = getWorkflowRunID();
+    attemptId = getWorkflowRunAttempt();
+  } catch (e) {
+    logger.warning(
+      `Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`,
+    );
+  }
   const sha = await getCommitOid(checkoutPath);
   const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
     config,
     codeQlVersion,
   );
-  return `${restoreKeyPrefix}${sha}`;
+  return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
 }

 /**
@@ -470,7 +488,7 @@ async function getCacheSaveKey(
  * not include the commit SHA. This allows us to restore the most recent
  * compatible overlay-base database.
  */
-async function getCacheRestoreKeyPrefix(
+export async function getCacheRestoreKeyPrefix(
   config: Config,
   codeQlVersion: string,
 ): Promise<string> {
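The resulting key layout is visible in the stability test above. A sketch of how the pieces compose; the prefix itself (version tag, config hash, sorted languages, CodeQL version) is produced by getCacheRestoreKeyPrefix and is inferred here from the test's expected strings rather than shown in this hunk:

// Values taken from the "cache keys remain stable" test above.
const restoreKeyPrefix =
  "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
const sha = "abc123def456";
const runId = 12345;
const attemptId = 1;

// New in this PR: the run ID and attempt ID are appended so that re-runs of
// the same commit produce distinct save keys, while restores still match on
// the SHA-free prefix.
const saveKey = `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
// "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1"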
@@ -9,7 +9,7 @@ import * as semver from "semver";

import { CommandInvocationError } from "./actions-util";
import { Logger } from "./logging";
import { assertNever, cleanUpGlob, isBinaryAccessible } from "./util";
import { assertNever, cleanUpPath, isBinaryAccessible } from "./util";

const MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
const MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
@@ -217,7 +217,7 @@ export async function extractTarZst(
      });
    });
  } catch (e) {
    await cleanUpGlob(dest, "extraction destination directory", logger);
    await cleanUpPath(dest, "extraction destination directory", logger);
    throw e;
  }
}

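This hunk, apparently from src/tar.ts, is part of a wider migration in this diff from the del package to Node's built-in fs.promises.rm. The semantics differ: del interprets its argument as a glob pattern and reports the matched paths, while fs.promises.rm takes a single literal path and, with force: true, succeeds silently when the path is missing. A small sketch of the contrast, with an illustrative path:

import * as fs from "fs";
import * as del from "del";

// Sketch: contrasting the old and new cleanup calls.
const dest = "/tmp/extraction-dest";

// Old: glob-based deletion. Characters like "*" or "?" in `dest` would be
// interpreted as pattern syntax, and the result lists the matched paths.
const deleted = await del.deleteAsync(dest, { force: true });
console.log(`del removed ${deleted.length} path(s)`);

// New: literal path deletion. `force: true` ignores a missing path and
// `recursive: true` removes directories; no glob interpretation occurs.
await fs.promises.rm(dest, { force: true, recursive: true });
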
@@ -12,7 +12,7 @@ import * as semver from "semver";

import { formatDuration, Logger } from "./logging";
import * as tar from "./tar";
import { cleanUpGlob, getErrorMessage, getRequiredEnvParam } from "./util";
import { cleanUpPath, getErrorMessage, getRequiredEnvParam } from "./util";

/**
 * High watermark to use when streaming the download and extraction of the CodeQL tools.
@@ -130,7 +130,7 @@ export async function downloadAndExtract(

      // If we failed during processing, we want to clean up the destination directory
      // before we try again.
      await cleanUpGlob(dest, "CodeQL bundle", logger);
      await cleanUpPath(dest, "CodeQL bundle", logger);
    }

    const toolsDownloadStart = performance.now();
@@ -167,7 +167,7 @@ export async function downloadAndExtract(
        )}).`,
      );
    } finally {
      await cleanUpGlob(archivedBundlePath, "CodeQL bundle archive", logger);
      await cleanUpPath(archivedBundlePath, "CodeQL bundle archive", logger);
    }

    return {

@@ -101,16 +101,6 @@ test("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => {
  }
});

test("getAddSnippetsFlag() should return the correct flag", (t) => {
  t.deepEqual(util.getAddSnippetsFlag(true), "--sarif-add-snippets");
  t.deepEqual(util.getAddSnippetsFlag("true"), "--sarif-add-snippets");

  t.deepEqual(util.getAddSnippetsFlag(false), "--no-sarif-add-snippets");
  t.deepEqual(util.getAddSnippetsFlag(undefined), "--no-sarif-add-snippets");
  t.deepEqual(util.getAddSnippetsFlag("false"), "--no-sarif-add-snippets");
  t.deepEqual(util.getAddSnippetsFlag("foo bar"), "--no-sarif-add-snippets");
});

test("getThreadsFlag() should return the correct --threads flag", (t) => {
  const numCpus = os.cpus().length;

@@ -534,3 +524,12 @@ test("getCgroupCpuCountFromCpus returns undefined if the CPU file exists but is
    );
  });
});

test("checkDiskUsage succeeds and produces positive numbers", async (t) => {
  process.env["GITHUB_WORKSPACE"] = os.tmpdir();
  const diskUsage = await util.checkDiskUsage(getRunnerLogger(true));
  if (t.truthy(diskUsage)) {
    t.true(diskUsage.numAvailableBytes > 0);
    t.true(diskUsage.numTotalBytes > 0);
  }
});

63
src/util.ts
@@ -1,12 +1,11 @@
import * as fs from "fs";
import * as fsPromises from "fs/promises";
import * as os from "os";
import * as path from "path";

import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import checkDiskSpace from "check-disk-space";
import * as del from "del";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
import * as semver from "semver";
@@ -167,7 +166,7 @@ export async function withTmpDir<T>(
): Promise<T> {
  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "codeql-action-"));
  const result = await body(tmpDir);
  await del.deleteAsync(tmpDir, { force: true });
  await fs.promises.rm(tmpDir, { force: true, recursive: true });
  return result;
}

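Judging from the signature in this hunk, withTmpDir creates a scratch directory, passes it to the async body, removes it with fs.promises.rm, and returns the body's result. A sketch of typical usage; the file name is illustrative:

import * as fs from "fs";
import * as path from "path";
import { withTmpDir } from "./util";

// Sketch: the scratch directory lives only for the duration of the body.
const contents = await withTmpDir(async (tmpDir) => {
  const file = path.join(tmpDir, "scratch.txt");
  await fs.promises.writeFile(file, "hello");
  return fs.promises.readFile(file, "utf8");
});
// By this point tmpDir has been removed recursively.
console.log(contents); // "hello"
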
@@ -343,21 +342,6 @@ export function getMemoryFlag(
  return `--ram=${megabytes}`;
}

/**
 * Get the codeql flag to specify whether to add code snippets to the sarif file.
 *
 * @returns string
 */
export function getAddSnippetsFlag(
  userInput: string | boolean | undefined,
): string {
  if (typeof userInput === "string") {
    // have to process specifically because any non-empty string is truthy
    userInput = userInput.toLowerCase() === "true";
  }
  return userInput ? "--sarif-add-snippets" : "--no-sarif-add-snippets";
}

/**
 * Get the value of the codeql `--threads` flag specified for the `threads`
 * input. If no value was specified, all available threads will be used.
@@ -756,7 +740,7 @@ export async function bundleDb(
  // from somewhere else or someone trying to make the action upload a
  // non-database file.
  if (fs.existsSync(databaseBundlePath)) {
    await del.deleteAsync(databaseBundlePath, { force: true });
    await fs.promises.rm(databaseBundlePath, { force: true });
  }
  await codeql.databaseBundle(databasePath, databaseBundlePath, dbName);
  return databaseBundlePath;
@@ -1099,24 +1083,17 @@ export async function checkDiskUsage(
  logger: Logger,
): Promise<DiskUsage | undefined> {
  try {
    // We avoid running the `df` binary under the hood for macOS ARM runners with SIP disabled.
    if (
      process.platform === "darwin" &&
      (process.arch === "arm" || process.arch === "arm64") &&
      !(await checkSipEnablement(logger))
    ) {
      return undefined;
    }

    const diskUsage = await checkDiskSpace(
    const diskUsage = await fsPromises.statfs(
      getRequiredEnvParam("GITHUB_WORKSPACE"),
    );
    const mbInBytes = 1024 * 1024;
    const gbInBytes = 1024 * 1024 * 1024;
    if (diskUsage.free < 2 * gbInBytes) {

    const blockSizeInBytes = diskUsage.bsize;
    const numBlocksPerMb = (1024 * 1024) / blockSizeInBytes;
    const numBlocksPerGb = (1024 * 1024 * 1024) / blockSizeInBytes;
    if (diskUsage.bavail < 2 * numBlocksPerGb) {
      const message =
        "The Actions runner is running low on disk space " +
        `(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
        `(${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
      if (process.env[EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
        logger.warning(message);
      } else {
@@ -1125,8 +1102,8 @@ export async function checkDiskUsage(
      core.exportVariable(EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
    }
    return {
      numAvailableBytes: diskUsage.free,
      numTotalBytes: diskUsage.size,
      numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
      numTotalBytes: diskUsage.blocks * blockSizeInBytes,
    };
  } catch (error) {
    logger.warning(
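checkDiskUsage now uses Node's built-in fsPromises.statfs (available since roughly Node 18.15) instead of the check-disk-space package, which is presumably also why the macOS SIP guard could be dropped: statfs is a direct syscall, whereas the old dependency shelled out to df. statfs reports sizes in filesystem blocks rather than bytes, hence the conversions above. The same arithmetic as a standalone sketch, with an illustrative path:

import * as fsPromises from "fs/promises";

// Sketch: convert statfs block counts into bytes, as the new code does.
const stats = await fsPromises.statfs("/tmp");
const blockSizeInBytes = stats.bsize; // size of one filesystem block
const availableBytes = stats.bavail * blockSizeInBytes; // blocks available to unprivileged users
const totalBytes = stats.blocks * blockSizeInBytes; // total data blocks

console.log(
  `${(availableBytes / (1024 * 1024)).toPrecision(4)} MB available of ` +
    `${(totalBytes / (1024 * 1024 * 1024)).toPrecision(4)} GB`,
);
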
@@ -1263,19 +1240,13 @@ export async function checkSipEnablement(
  }
}

export async function cleanUpGlob(glob: string, name: string, logger: Logger) {
export async function cleanUpPath(file: string, name: string, logger: Logger) {
  logger.debug(`Cleaning up ${name}.`);
  try {
    const deletedPaths = await del.deleteAsync(glob, { force: true });
    if (deletedPaths.length === 0) {
      logger.warning(
        `Failed to clean up ${name}: no files found matching ${glob}.`,
      );
    } else if (deletedPaths.length === 1) {
      logger.debug(`Cleaned up ${name}.`);
    } else {
      logger.debug(`Cleaned up ${name} (${deletedPaths.length} files).`);
    }
    await fs.promises.rm(file, {
      force: true,
      recursive: true,
    });
  } catch (e) {
    logger.warning(`Failed to clean up ${name}: ${e}.`);
  }

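After the rename, cleanUpPath keeps the old contract of never throwing: failures are downgraded to warnings, so a cleanup problem cannot mask the error that triggered cleanup in the first place. What changes is that callers pass a concrete path, and there is no longer a warning when nothing matched, since fs.promises.rm with force: true treats a missing path as success. A sketch with illustrative arguments:

import { getActionsLogger } from "./logging";
import { cleanUpPath } from "./util";

// Sketch: cleanUpPath is fire-and-forget; path and name are illustrative.
const logger = getActionsLogger();
await cleanUpPath("/tmp/codeql-bundle.tar.zst", "CodeQL bundle archive", logger);
// Calling again is safe: with force=true a now-missing path is not an error,
// and any other failure is logged as a warning instead of being thrown.
await cleanUpPath("/tmp/codeql-bundle.tar.zst", "CodeQL bundle archive", logger);
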
@@ -2,9 +2,17 @@ import test, { ExecutionContext } from "ava";
import * as yaml from "js-yaml";
import * as sinon from "sinon";

import { getCodeQLForTesting } from "./codeql";
import { setupTests } from "./testing-utils";
import * as actionsUtil from "./actions-util";
import { createStubCodeQL, getCodeQLForTesting } from "./codeql";
import { EnvVar } from "./environment";
import {
  checkExpectedLogMessages,
  getRecordingLogger,
  LoggedMessage,
  setupTests,
} from "./testing-utils";
import {
  checkWorkflow,
  CodedError,
  formatWorkflowCause,
  formatWorkflowErrors,
@@ -13,6 +21,7 @@ import {
  Workflow,
  WorkflowErrors,
} from "./workflow";
import * as workflow from "./workflow";

function errorCodes(
  actual: CodedError[],
@@ -870,3 +879,78 @@ test("getCategoryInputOrThrow throws error for workflow with multiple calls to a
    },
  );
});

test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => {
  const messages: LoggedMessage[] = [];
  const codeql = createStubCodeQL({});

  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
  validateWorkflow.resolves(undefined);

  await checkWorkflow(getRecordingLogger(messages), codeql);

  t.assert(
    validateWorkflow.calledOnce,
    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
  );
  checkExpectedLogMessages(t, messages, [
    "Detected no issues with the code scanning workflow.",
  ]);
});

test("checkWorkflow - logs problems with workflow validation", async (t) => {
  const messages: LoggedMessage[] = [];
  const codeql = createStubCodeQL({});

  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");
  validateWorkflow.resolves("problem");

  await checkWorkflow(getRecordingLogger(messages), codeql);

  t.assert(
    validateWorkflow.calledOnce,
    "`checkWorkflow` unexpectedly did not call `validateWorkflow`",
  );
  checkExpectedLogMessages(t, messages, [
    "Unable to validate code scanning workflow: problem",
  ]);
});

test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => {
  process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true";

  const messages: LoggedMessage[] = [];
  const codeql = createStubCodeQL({});

  sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false);
  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");

  await checkWorkflow(getRecordingLogger(messages), codeql);

  t.assert(
    validateWorkflow.notCalled,
    "`checkWorkflow` called `validateWorkflow` unexpectedly",
  );
  t.is(messages.length, 0);
});

test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => {
  const messages: LoggedMessage[] = [];
  const codeql = createStubCodeQL({});

  const isDynamicWorkflow = sinon
    .stub(actionsUtil, "isDynamicWorkflow")
    .returns(true);
  const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow");

  await checkWorkflow(getRecordingLogger(messages), codeql);

  t.assert(isDynamicWorkflow.calledOnce);
  t.assert(
    validateWorkflow.notCalled,
    "`checkWorkflow` called `validateWorkflow` unexpectedly",
  );
  t.is(messages.length, 0);
});

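The tests above stub workflow.internal.validateWorkflow rather than the function itself. That relies on the internal object exported from src/workflow.ts in the next hunks: ES module exports are read-only bindings, so sinon cannot swap out a plain exported function that the module calls directly, but it can replace a property on an exported object, provided the module also calls through that object. A condensed sketch of the pattern, with generic module names:

// module.ts (sketch of the testable-internals pattern)
async function validateWorkflow(): Promise<string | undefined> {
  return undefined; // real validation logic elided
}

export async function checkWorkflow(): Promise<void> {
  // Call through `internal` so tests can intercept the call.
  await internal.validateWorkflow();
}

export const internal = { validateWorkflow };

// module.test.ts (sketch)
// import * as sinon from "sinon";
// import * as mod from "./module";
// sinon.stub(mod.internal, "validateWorkflow").resolves("problem");
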
@@ -5,8 +5,10 @@ import zlib from "zlib";
import * as core from "@actions/core";
import * as yaml from "js-yaml";

import { isDynamicWorkflow } from "./actions-util";
import * as api from "./api-client";
import { CodeQL } from "./codeql";
import { EnvVar } from "./environment";
import { Logger } from "./logging";
import {
  getRequiredEnvParam,
@@ -216,7 +218,7 @@ function hasWorkflowTrigger(triggerName: string, doc: Workflow): boolean {
  return Object.prototype.hasOwnProperty.call(doc.on, triggerName);
}

export async function validateWorkflow(
async function validateWorkflow(
  codeql: CodeQL,
  logger: Logger,
): Promise<undefined | string> {
@@ -462,3 +464,36 @@ export function getCheckoutPathInputOrThrow(
    ) || getRequiredEnvParam("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
  );
}

/**
 * A wrapper around `validateWorkflow` which reports the outcome.
 *
 * @param logger The logger to use.
 * @param codeql The CodeQL instance.
 */
export async function checkWorkflow(logger: Logger, codeql: CodeQL) {
  // Check the workflow for problems, unless `SKIP_WORKFLOW_VALIDATION` is `true`
  // or the workflow trigger is `dynamic`.
  if (
    !isDynamicWorkflow() &&
    process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] !== "true"
  ) {
    core.startGroup("Validating workflow");
    const validateWorkflowResult = await internal.validateWorkflow(
      codeql,
      logger,
    );
    if (validateWorkflowResult === undefined) {
      logger.info("Detected no issues with the code scanning workflow.");
    } else {
      logger.debug(
        `Unable to validate code scanning workflow: ${validateWorkflowResult}`,
      );
    }
    core.endGroup();
  }
}

export const internal = {
  validateWorkflow,
};

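A sketch of how an entry point might call the new wrapper. This call site is assumed for illustration, not taken from the diff; getActionsLogger is the logger factory from src/logging.ts, and the CodeQL instance would come from the action's usual setup:

import { CodeQL } from "./codeql";
import { getActionsLogger } from "./logging";
import { checkWorkflow } from "./workflow";

// Hypothetical call site: run the workflow check early in the init step.
async function runInit(codeql: CodeQL) {
  const logger = getActionsLogger();
  // No-op when the variable behind EnvVar.SKIP_WORKFLOW_VALIDATION is "true"
  // or when the workflow trigger is `dynamic` (apparently default setup).
  await checkWorkflow(logger, codeql);
}
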