Mirror of https://github.com/github/codeql-action.git (synced 2025-12-20 06:10:20 +08:00)

Compare commits: default-se ... v3.31.8 (71 commits)
The compare view lists 71 commits; only the abbreviated SHA1 of each commit was captured (the author and date columns are empty):

f47c8e6a9b, 74951318a2, 5676d1f64a, c1bea80e56, 2d9c0b97af, 827017f97b, bffd034ab1, 817dbfb39b, 793f7006bb, d2e9832330,
c2e4b7785f, 66d7f51a10, 497990dfed, 89cb79a131, dbf6819ebd, 5af51f4048, e439418aab, 249860e323, d3ced5c96c, c12d7c1f2d,
2e2a1cf1ef, e2cca77d06, 801a18bea6, 1c715a714c, c3d42c5d08, 9031cd9330, f58938aee2, 1f1c162805, 7ab96a0e6f, e3cb86275a,
f94c9befff, e5971bdba6, c5a9d29dc9, 9f1109665d, f8f60f3a2b, f4d10b9ef7, 5d5cd550d3, c6eb09db21, 09db9044dc, d3cd47d8d6,
8e9caa5100, 23a6333b88, c503cb4fbb, c2805e0a04, c0d3370b54, ddd0dc746a, 2f607936ce, 37e7dfbaa0, d198d2fabf, 9e3918e481,
7dd1575dac, 28fc48d83c, 12c6008004, d3019effb0, 42213152a8, e677e67801, 5f3f3164ad, ba42101490, f11af5849b, ba5430dc86,
13e883e119, 755f44910c, 948223fe01, a37add20d4, ab163cf08b, 319796f085, bd1ac56295, a8d1ac45b9, c551c50310, 01f1a24033,
b264e15259
.github/actions/check-sarif/action.yml (vendored, 2 changed lines)
@@ -16,5 +16,5 @@ inputs:
Comma separated list of query ids that should NOT be included in this SARIF file.

runs:
using: node24
using: node20
main: index.js

@@ -1,6 +0,0 @@
name: Verify that the best-effort debug artifact scan completed
description: Verifies that the best-effort debug artifact scan completed successfully during tests
runs:
using: node24
main: index.js
post: post.js

@@ -1,2 +0,0 @@
// The main step is a no-op, since we can only verify artifact scan completion in the post step.
console.log("Will verify artifact scan completion in the post step.");

@@ -1,11 +0,0 @@
// Post step - runs after the workflow completes, when artifact scan has finished
const process = require("process");

const scanFinished = process.env.CODEQL_ACTION_ARTIFACT_SCAN_FINISHED;

if (scanFinished !== "true") {
  console.error("Error: Best-effort artifact scan did not complete. Expected CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true");
  process.exit(1);
}

console.log("✓ Best-effort artifact scan completed successfully");
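The three deleted hunks above remove a small Node-based helper action (an action.yml plus main and post scripts) whose only real check runs in the post step. Later hunks in this diff show the debug-artifacts test workflows invoking it. A minimal sketch of that wiring, assuming the action repository is checked out to `../action` as those workflows do; the surrounding steps are assumptions, not part of this diff:

```yaml
# Illustrative test-job fragment. The helper's main step is a no-op; its post
# step fails the job unless CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true has been
# set by the time the job's post steps run.
steps:
  - name: Assert best-effort artifact scan completed
    uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
  - uses: ./../action/init
    with:
      debug: true
  - uses: ./../action/analyze
```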
.github/pull_request_template.md (vendored, 2 changed lines)
@@ -34,7 +34,7 @@ Products:

Environments:

- **Dotcom** - Impacts CodeQL workflows on `github.com` and/or GitHub Enterprise Cloud with Data Residency.
- **Dotcom** - Impacts CodeQL workflows on `github.com`.
- **GHES** - Impacts CodeQL workflows on GitHub Enterprise Server.
- **Testing/None** - This change does not impact any CodeQL workflows in production.
.github/workflows/__bundle-zstd.yml (generated, vendored, 2 changed lines)
@@ -79,7 +79,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__config-export.yml (generated, vendored, 2 changed lines)
@@ -67,7 +67,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__diagnostics-export.yml (generated, vendored, 2 changed lines)
@@ -78,7 +78,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__export-file-baseline-information.yml (generated, vendored, 2 changed lines)
@@ -99,7 +99,7 @@ jobs:
with:
output: ${{ runner.temp }}/results
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__global-proxy.yml (generated, vendored, 1 changed line)
@@ -76,7 +76,6 @@ jobs:
- uses: ./../action/analyze
env:
https_proxy: http://squid-proxy:3128
CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
CODEQL_ACTION_TEST_MODE: true
container:
image: ubuntu:22.04
.github/workflows/__job-run-uuid-sarif.yml (generated, vendored, 2 changed lines)
@@ -64,7 +64,7 @@ jobs:
with:
output: ${{ runner.temp }}/results
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__quality-queries.yml (generated, vendored, 6 changed lines)
@@ -83,7 +83,7 @@ jobs:
post-processed-sarif-path: ${{ runner.temp }}/post-processed
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

@@ -91,14 +91,14 @@ jobs:
retention-days: 7
- name: Upload quality SARIF
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: ${{ runner.temp }}/results/javascript.quality.sarif
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v5
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
.github/workflows/__rubocop-multi-language.yml (generated, vendored, 2 changed lines)
@@ -56,7 +56,7 @@ jobs:
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- name: Set up Ruby
uses: ruby/setup-ruby@ac793fdd38cc468a4dd57246fa9d0e868aba9085 # v1.270.0
uses: ruby/setup-ruby@8aeb6ff8030dd539317f8e1769a044873b56ea71 # v1.268.0
with:
ruby-version: 2.6
- name: Install Code Scanning integration
(workflow file name not captured in this view)

@@ -6,11 +6,6 @@ env:
# Diff informed queries add an additional query filter which is not yet
# taken into account by these tests.
CODEQL_ACTION_DIFF_INFORMED_QUERIES: false
# Specify overlay enablement manually to ensure stability around the exclude-from-incremental
# query filter. Here we only enable for the default code scanning suite.
CODEQL_ACTION_OVERLAY_ANALYSIS: true
CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true

on:
push:

@@ -58,8 +58,6 @@ jobs:
uses: actions/setup-dotnet@v5
with:
dotnet-version: '9.x'
- name: Assert best-effort artifact scan completed
uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}

@@ -85,7 +83,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
- name: Check expected artifacts exist
run: |
LANGUAGES="cpp csharp go java javascript python"
.github/workflows/debug-artifacts-safe.yml (vendored, 4 changed lines)
@@ -54,8 +54,6 @@ jobs:
uses: actions/setup-dotnet@v5
with:
dotnet-version: '9.x'
- name: Assert best-effort artifact scan completed
uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
- uses: ./../action/init
id: init
with:

@@ -79,7 +77,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v6
- name: Check expected artifacts exist
run: |
VERSIONS="stable-v2.20.3 default linked nightly-latest"
.github/workflows/post-release-mergeback.yml (vendored, 2 changed lines)
@@ -142,7 +142,7 @@ jobs:
token: "${{ secrets.GITHUB_TOKEN }}"

- name: Generate token
uses: actions/create-github-app-token@v2.2.1
uses: actions/create-github-app-token@v2.2.0
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}
.github/workflows/rollback-release.yml (vendored, 2 changed lines)
@@ -137,7 +137,7 @@ jobs:

- name: Generate token
if: github.event_name == 'workflow_dispatch'
uses: actions/create-github-app-token@v2.2.1
uses: actions/create-github-app-token@v2.2.0
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}
.github/workflows/update-release-branch.yml (vendored, 2 changed lines)
@@ -93,7 +93,7 @@ jobs:
pull-requests: write # needed to create pull request
steps:
- name: Generate token
uses: actions/create-github-app-token@v2.2.1
uses: actions/create-github-app-token@v2.2.0
id: app-token
with:
app-id: ${{ vars.AUTOMATION_APP_ID }}
CHANGELOG.md (41 changed lines)
@@ -2,64 +2,56 @@

See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## [UNRELEASED]

No user facing changes.

## 4.31.9 - 16 Dec 2025

No user facing changes.

## 4.31.8 - 11 Dec 2025
## 3.31.8 - 11 Dec 2025

- Update default CodeQL bundle version to 2.23.8. [#3354](https://github.com/github/codeql-action/pull/3354)

## 4.31.7 - 05 Dec 2025
## 3.31.7 - 05 Dec 2025

- Update default CodeQL bundle version to 2.23.7. [#3343](https://github.com/github/codeql-action/pull/3343)

## 4.31.6 - 01 Dec 2025
## 3.31.6 - 01 Dec 2025

No user facing changes.

## 4.31.5 - 24 Nov 2025
## 3.31.5 - 24 Nov 2025

- Update default CodeQL bundle version to 2.23.6. [#3321](https://github.com/github/codeql-action/pull/3321)

## 4.31.4 - 18 Nov 2025
## 3.31.4 - 18 Nov 2025

No user facing changes.

## 4.31.3 - 13 Nov 2025
## 3.31.3 - 13 Nov 2025

- CodeQL Action v3 will be deprecated in December 2026. The Action now logs a warning for customers who are running v3 but could be running v4. For more information, see [Upcoming deprecation of CodeQL Action v3](https://github.blog/changelog/2025-10-28-upcoming-deprecation-of-codeql-action-v3/).
- Update default CodeQL bundle version to 2.23.5. [#3288](https://github.com/github/codeql-action/pull/3288)

## 4.31.2 - 30 Oct 2025
## 3.31.2 - 30 Oct 2025

No user facing changes.

## 4.31.1 - 30 Oct 2025
## 3.31.1 - 30 Oct 2025

- The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.

## 4.31.0 - 24 Oct 2025
## 3.31.0 - 24 Oct 2025

- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
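For context, the post-processing change in the 4.31.0/3.31.0 entry above is only observable for `analyze` runs that do not upload their results. A minimal Advanced Setup sketch of such a run; `upload: never` is assumed to be one of the non-`always` values the entry refers to, and the output path is illustrative rather than taken from this diff:

```yaml
- name: Analyze without uploading results
  uses: github/codeql-action/analyze@v3
  with:
    # Post-processing of the SARIF written to this output directory now runs
    # even though no upload is performed for this step.
    upload: never
    output: ${{ runner.temp }}/results
```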
## 4.30.9 - 17 Oct 2025
## 3.30.9 - 17 Oct 2025

- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
- Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)

## 4.30.8 - 10 Oct 2025
## 3.30.8 - 10 Oct 2025

No user facing changes.

## 4.30.7 - 06 Oct 2025
## 3.30.7 - 06 Oct 2025

- [v4+ only] The CodeQL Action now runs on Node.js v24. [#3169](https://github.com/github/codeql-action/pull/3169)
No user facing changes.

## 3.30.6 - 02 Oct 2025

@@ -295,17 +287,13 @@ No user facing changes.

## 3.26.12 - 07 Oct 2024

- _Upcoming breaking change_: Add a deprecation warning for customers using CodeQL version 2.14.5 and earlier. These versions of CodeQL were discontinued on 24 September 2024 alongside GitHub Enterprise Server 3.10, and will be unsupported by CodeQL Action versions 3.27.0 and later and versions 2.27.0 and later. [#2520](https://github.com/github/codeql-action/pull/2520)
  - If you are using one of these versions, please update to CodeQL CLI version 2.14.6 or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version.
  - Alternatively, if you want to continue using a version of the CodeQL CLI between 2.13.5 and 2.14.5, you can replace `github/codeql-action/*@v3` by `github/codeql-action/*@v3.26.11` and `github/codeql-action/*@v2` by `github/codeql-action/*@v2.26.11` in your code scanning workflow to ensure you continue using this version of the CodeQL Action.
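The two sub-items in the 3.26.12 entry above describe the available migration paths in workflow terms. A minimal sketch of both options; the `languages` value and step layout are illustrative assumptions, while the `tools` input and the `v3.26.11` pin come from the entry itself:

```yaml
# Option 1: drop the custom `tools` input so init downloads the default CLI.
- uses: github/codeql-action/init@v3
  with:
    languages: javascript
    # tools: <custom CodeQL CLI URL>   # remove this line

# Option 2: keep using a CodeQL CLI between 2.13.5 and 2.14.5 by pinning every
# codeql-action step to the last release that supports it.
- uses: github/codeql-action/init@v3.26.11
- uses: github/codeql-action/analyze@v3.26.11
```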
## 3.26.11 - 03 Oct 2024

- _Upcoming breaking change_: Add support for using `actions/download-artifact@v4` to programmatically consume CodeQL Action debug artifacts.

  Starting November 30, 2024, GitHub.com customers will [no longer be able to use `actions/download-artifact@v3`](https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/). Therefore, to avoid breakage, customers who programmatically download the CodeQL Action debug artifacts should set the `CODEQL_ACTION_ARTIFACT_V4_UPGRADE` environment variable to `true` and bump `actions/download-artifact@v3` to `actions/download-artifact@v4` in their workflows. The CodeQL Action will enable this behavior by default in early November and workflows that have not yet bumped `actions/download-artifact@v3` to `actions/download-artifact@v4` will begin failing then.

  This change is currently unavailable for GitHub Enterprise Server customers, as `actions/upload-artifact@v4` and `actions/download-artifact@v4` are not yet compatible with GHES.
- Update default CodeQL bundle version to 2.19.1. [#2519](https://github.com/github/codeql-action/pull/2519)
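A minimal sketch of the opt-in migration described in the 3.26.11 entry above. Only the environment variable name and the download-artifact version bump come from the entry; the workflow layout and artifact-consuming job are assumptions for illustration:

```yaml
# Illustrative fragment: opt the CodeQL jobs into v4-compatible debug artifacts,
# then consume them with actions/download-artifact@v4 in a later job.
env:
  CODEQL_ACTION_ARTIFACT_V4_UPGRADE: true

jobs:
  consume-debug-artifacts:
    runs-on: ubuntu-latest
    steps:
      - name: Download CodeQL debug artifacts
        uses: actions/download-artifact@v4   # previously @v3
```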
@@ -428,12 +416,9 @@ No user facing changes.

## 3.25.0 - 15 Apr 2024

- The deprecated feature for extracting dependencies for a Python analysis has been removed. [#2224](https://github.com/github/codeql-action/pull/2224)

  As a result, the following inputs and environment variables are now ignored:

  - The `setup-python-dependencies` input to the `init` Action
  - The `CODEQL_ACTION_DISABLE_PYTHON_DEPENDENCY_INSTALLATION` environment variable

  We recommend removing any references to these from your workflows. For more information, see the release notes for CodeQL Action v3.23.0 and v2.23.0.
- Automatically overwrite an existing database if found on the filesystem. [#2229](https://github.com/github/codeql-action/pull/2229)
- Bump the minimum CodeQL bundle version to 2.12.6. [#2232](https://github.com/github/codeql-action/pull/2232)
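The 3.25.0 entry above asks users to drop the now-ignored Python dependency settings. A minimal before/after sketch of that cleanup; the init step and `languages` value are assumed for illustration, while the input and environment variable names come from the entry:

```yaml
# Before: both settings are now ignored by the CodeQL Action.
- uses: github/codeql-action/init@v3
  env:
    CODEQL_ACTION_DISABLE_PYTHON_DEPENDENCY_INSTALLATION: true
  with:
    languages: python
    setup-python-dependencies: false

# After: simply remove the input and the environment variable.
- uses: github/codeql-action/init@v3
  with:
    languages: python
```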
@@ -94,6 +94,6 @@ outputs:
sarif-id:
description: The ID of the uploaded SARIF file.
runs:
using: node24
using: node20
main: "../lib/analyze-action.js"
post: "../lib/analyze-action-post.js"

@@ -15,5 +15,5 @@ inputs:
$GITHUB_WORKSPACE as its working directory.
required: false
runs:
using: node24
using: node20
main: '../lib/autobuild-action.js'

@@ -165,6 +165,6 @@ outputs:
codeql-version:
description: The version of the CodeQL binary used for analysis
runs:
using: node24
using: node20
main: '../lib/init-action.js'
post: '../lib/init-action-post.js'
Diffs for the following generated files were suppressed because they are too large to display:

- lib/analyze-action-post.js (43714 changed lines)
- lib/analyze-action.js (37955 changed lines)
- lib/autobuild-action.js (36455 changed lines)
- lib/init-action-post.js (43862 changed lines)
- lib/init-action.js (37926 changed lines)
- lib/resolve-environment-action.js (36442 changed lines)
- lib/setup-codeql-action.js (36449 changed lines)
- lib/start-proxy-action-post.js (43035 changed lines)
- lib/start-proxy-action.js (36684 changed lines)
- lib/upload-lib.js (36484 changed lines)
- lib/upload-sarif-action-post.js (43602 changed lines)
- lib/upload-sarif-action.js (36471 changed lines)
- package-lock.json (1218 changed lines)
package.json (22 changed lines)
@@ -1,6 +1,6 @@
{
  "name": "codeql",
  "version": "4.31.10",
  "version": "3.31.8",
  "private": true,
  "description": "CodeQL action",
  "scripts": {

@@ -24,12 +24,12 @@
  },
  "license": "MIT",
  "dependencies": {
    "@actions/artifact": "^5.0.1",
    "@actions/artifact": "^4.0.0",
    "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
    "@actions/cache": "^5.0.1",
    "@actions/core": "^2.0.1",
    "@actions/exec": "^2.0.0",
    "@actions/github": "^6.0.1",
    "@actions/cache": "^4.1.0",
    "@actions/core": "^1.11.1",
    "@actions/exec": "^1.1.1",
    "@actions/github": "^6.0.0",
    "@actions/glob": "^0.5.0",
    "@actions/http-client": "^3.0.0",
    "@actions/io": "^2.0.0",

@@ -43,7 +43,7 @@
    "js-yaml": "^4.1.1",
    "jsonschema": "1.4.1",
    "long": "^5.3.2",
    "node-forge": "^1.3.3",
    "node-forge": "^1.3.2",
    "semver": "^7.7.3",
    "uuid": "^13.0.0"
  },

@@ -51,7 +51,7 @@
    "@ava/typescript": "6.0.0",
    "@eslint/compat": "^2.0.0",
    "@eslint/eslintrc": "^3.3.3",
    "@eslint/js": "^9.39.2",
    "@eslint/js": "^9.39.1",
    "@microsoft/eslint-formatter-sarif": "^3.1.0",
    "@octokit/types": "^16.0.0",
    "@types/archiver": "^7.0.0",

@@ -61,16 +61,16 @@
    "@types/node-forge": "^1.3.14",
    "@types/semver": "^7.7.1",
    "@types/sinon": "^21.0.0",
    "@typescript-eslint/eslint-plugin": "^8.49.0",
    "@typescript-eslint/eslint-plugin": "^8.48.0",
    "@typescript-eslint/parser": "^8.48.0",
    "ava": "^6.4.1",
    "esbuild": "^0.27.1",
    "esbuild": "^0.27.0",
    "eslint": "^8.57.1",
    "eslint-import-resolver-typescript": "^3.8.7",
    "eslint-plugin-filenames": "^1.3.2",
    "eslint-plugin-github": "^5.1.8",
    "eslint-plugin-import": "2.29.1",
    "eslint-plugin-jsdoc": "^61.5.0",
    "eslint-plugin-jsdoc": "^61.4.1",
    "eslint-plugin-no-async-foreach": "^0.1.1",
    "glob": "^11.1.0",
    "nock": "^14.0.10",
@@ -27,7 +27,7 @@ steps:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: ${{ matrix.os }}-zstd-bundle.sarif
|
||||
path: ${{ runner.temp }}/results/javascript.sarif
|
||||
|
||||
@@ -12,7 +12,7 @@ steps:
|
||||
output: "${{ runner.temp }}/results"
|
||||
upload-database: false
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: "${{ runner.temp }}/results/javascript.sarif"
|
||||
|
||||
@@ -25,7 +25,7 @@ steps:
|
||||
output: "${{ runner.temp }}/results"
|
||||
upload-database: false
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: "${{ runner.temp }}/results/javascript.sarif"
|
||||
|
||||
@@ -18,7 +18,7 @@ steps:
|
||||
with:
|
||||
output: "${{ runner.temp }}/results"
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: "${{ runner.temp }}/results/javascript.sarif"
|
||||
|
||||
@@ -23,7 +23,6 @@ services:
|
||||
- 3128:3128
|
||||
env:
|
||||
https_proxy: http://squid-proxy:3128
|
||||
CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
|
||||
steps:
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
|
||||
@@ -11,7 +11,7 @@ steps:
|
||||
with:
|
||||
output: "${{ runner.temp }}/results"
|
||||
- name: Upload SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||
path: "${{ runner.temp }}/results/javascript.sarif"
|
||||
|
||||
@@ -39,7 +39,7 @@ steps:
|
||||
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
|
||||
- name: Upload security SARIF
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
|
||||
@@ -47,14 +47,14 @@ steps:
|
||||
retention-days: 7
|
||||
- name: Upload quality SARIF
|
||||
if: contains(matrix.analysis-kinds, 'code-quality')
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
|
||||
path: "${{ runner.temp }}/results/javascript.quality.sarif"
|
||||
retention-days: 7
|
||||
- name: Upload post-processed SARIF
|
||||
uses: actions/upload-artifact@v6
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
|
||||
|
||||
@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
|
||||
versions: ["default"]
|
||||
steps:
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@ac793fdd38cc468a4dd57246fa9d0e868aba9085 # v1.270.0
|
||||
uses: ruby/setup-ruby@8aeb6ff8030dd539317f8e1769a044873b56ea71 # v1.268.0
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
|
||||
@@ -21,5 +21,5 @@ outputs:
|
||||
environment:
|
||||
description: The inferred build environment configuration.
|
||||
runs:
|
||||
using: node24
|
||||
using: node20
|
||||
main: '../lib/resolve-environment-action.js'
|
||||
|
||||
@@ -35,5 +35,5 @@ outputs:
|
||||
codeql-version:
|
||||
description: The version of the CodeQL binary that was installed.
|
||||
runs:
|
||||
using: node24
|
||||
using: node20
|
||||
main: '../lib/setup-codeql-action.js'
|
||||
|
||||
@@ -19,18 +19,20 @@ import { getApiDetails, getGitHubVersion } from "./api-client";
|
||||
import { runAutobuild } from "./autobuild";
|
||||
import { getTotalCacheSize, shouldStoreCache } from "./caching-utils";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config, getConfig } from "./config-utils";
|
||||
import {
|
||||
cleanupAndUploadDatabases,
|
||||
DatabaseUploadResult,
|
||||
} from "./database-upload";
|
||||
Config,
|
||||
getConfig,
|
||||
isCodeQualityEnabled,
|
||||
isCodeScanningEnabled,
|
||||
} from "./config-utils";
|
||||
import { cleanupAndUploadDatabases } from "./database-upload";
|
||||
import {
|
||||
DependencyCacheUploadStatusReport,
|
||||
uploadDependencyCaches,
|
||||
} from "./dependency-caching";
|
||||
import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Features } from "./feature-flags";
|
||||
import { Feature, Features } from "./feature-flags";
|
||||
import { KnownLanguage } from "./languages";
|
||||
import { getActionsLogger, Logger } from "./logging";
|
||||
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay-database-utils";
|
||||
@@ -57,13 +59,15 @@ interface AnalysisStatusReport
|
||||
extends uploadLib.UploadStatusReport,
|
||||
QueriesStatusReport {}
|
||||
|
||||
interface DependencyCachingUploadStatusReport {
|
||||
dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
|
||||
}
|
||||
|
||||
interface FinishStatusReport
|
||||
extends StatusReportBase,
|
||||
DatabaseCreationTimings,
|
||||
AnalysisStatusReport {
|
||||
dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
|
||||
database_upload_results: DatabaseUploadResult[];
|
||||
}
|
||||
AnalysisStatusReport,
|
||||
DependencyCachingUploadStatusReport {}
|
||||
|
||||
interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
|
||||
/** Size of TRAP caches that we uploaded, in bytes. */
|
||||
@@ -82,7 +86,6 @@ async function sendStatusReport(
|
||||
didUploadTrapCaches: boolean,
|
||||
trapCacheCleanup: TrapCacheCleanupStatusReport | undefined,
|
||||
dependencyCacheResults: DependencyCacheUploadStatusReport | undefined,
|
||||
databaseUploadResults: DatabaseUploadResult[],
|
||||
logger: Logger,
|
||||
) {
|
||||
const status = getActionsStatus(error, stats?.analyze_failure_language);
|
||||
@@ -103,7 +106,6 @@ async function sendStatusReport(
|
||||
...(dbCreationTimings || {}),
|
||||
...(trapCacheCleanup || {}),
|
||||
dependency_caching_upload_results: dependencyCacheResults,
|
||||
database_upload_results: databaseUploadResults,
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
|
||||
@@ -221,7 +223,6 @@ async function run() {
|
||||
let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
|
||||
let didUploadTrapCaches = false;
|
||||
let dependencyCacheResults: DependencyCacheUploadStatusReport | undefined;
|
||||
let databaseUploadResults: DatabaseUploadResult[] = [];
|
||||
util.initializeEnvironment(actionsUtil.getActionVersion());
|
||||
|
||||
// Make inputs accessible in the `post` step, details at
|
||||
@@ -357,15 +358,46 @@ async function run() {
|
||||
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
|
||||
const category = actionsUtil.getOptionalInput("category");
|
||||
|
||||
uploadResults = await postProcessAndUploadSarif(
|
||||
logger,
|
||||
features,
|
||||
uploadKind,
|
||||
checkoutPath,
|
||||
outputDir,
|
||||
category,
|
||||
actionsUtil.getOptionalInput("post-processed-sarif-path"),
|
||||
);
|
||||
if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
|
||||
uploadResults = await postProcessAndUploadSarif(
|
||||
logger,
|
||||
features,
|
||||
uploadKind,
|
||||
checkoutPath,
|
||||
outputDir,
|
||||
category,
|
||||
actionsUtil.getOptionalInput("post-processed-sarif-path"),
|
||||
);
|
||||
} else if (uploadKind === "always") {
|
||||
uploadResults = {};
|
||||
|
||||
if (isCodeScanningEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeScanning,
|
||||
);
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeQuality] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeQuality,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
uploadResults = {};
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
|
||||
// Set the SARIF id outputs only if we have results for them, to avoid
|
||||
// having keys with empty values in the action output.
|
||||
@@ -393,7 +425,7 @@ async function run() {
|
||||
// Possibly upload the database bundles for remote queries.
|
||||
// Note: Take care with the ordering of this call since databases may be cleaned up
|
||||
// at the `overlay` or `clear` level.
|
||||
databaseUploadResults = await cleanupAndUploadDatabases(
|
||||
await cleanupAndUploadDatabases(
|
||||
repositoryNwo,
|
||||
codeql,
|
||||
config,
|
||||
@@ -465,7 +497,6 @@ async function run() {
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
databaseUploadResults,
|
||||
logger,
|
||||
);
|
||||
return;
|
||||
@@ -488,7 +519,6 @@ async function run() {
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
databaseUploadResults,
|
||||
logger,
|
||||
);
|
||||
} else if (runStats !== undefined) {
|
||||
@@ -502,7 +532,6 @@ async function run() {
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
databaseUploadResults,
|
||||
logger,
|
||||
);
|
||||
} else {
|
||||
@@ -516,7 +545,6 @@ async function run() {
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
databaseUploadResults,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -95,14 +95,14 @@ test("getGitHubVersion for different domain", async (t) => {
|
||||
t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
|
||||
});
|
||||
|
||||
test("getGitHubVersion for GHEC-DR", async (t) => {
|
||||
test("getGitHubVersion for GHE_DOTCOM", async (t) => {
|
||||
mockGetMetaVersionHeader("ghe.com");
|
||||
const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
|
||||
auth: "",
|
||||
url: "https://foo.ghe.com",
|
||||
apiURL: undefined,
|
||||
});
|
||||
t.deepEqual({ type: util.GitHubVariant.GHEC_DR }, gheDotcom);
|
||||
t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
|
||||
});
|
||||
|
||||
test("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
|
||||
|
||||
@@ -125,7 +125,7 @@ export async function getGitHubVersionFromApi(
|
||||
}
|
||||
|
||||
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
|
||||
return { type: GitHubVariant.GHEC_DR };
|
||||
return { type: GitHubVariant.GHE_DOTCOM };
|
||||
}
|
||||
|
||||
const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] as string;
|
||||
|
||||
@@ -1,98 +0,0 @@
|
||||
import * as fs from "fs";
|
||||
import * as os from "os";
|
||||
import * as path from "path";
|
||||
|
||||
import test from "ava";
|
||||
|
||||
import { scanArtifactsForTokens } from "./artifact-scanner";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { getRecordingLogger, LoggedMessage } from "./testing-utils";
|
||||
|
||||
test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
|
||||
|
||||
try {
|
||||
// Create a test file with a fake GitHub token
|
||||
const testFile = path.join(tempDir, "test.txt");
|
||||
fs.writeFileSync(
|
||||
testFile,
|
||||
"This is a test file with token ghp_1234567890123456789012345678901234AB",
|
||||
);
|
||||
|
||||
const error = await t.throwsAsync(
|
||||
async () => await scanArtifactsForTokens([testFile], logger),
|
||||
);
|
||||
|
||||
t.regex(
|
||||
error?.message || "",
|
||||
/Found 1 potential GitHub token.*Personal Access Token/,
|
||||
);
|
||||
t.regex(error?.message || "", /test\.txt/);
|
||||
} finally {
|
||||
// Clean up
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test("scanArtifactsForTokens handles files without tokens", async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
|
||||
|
||||
try {
|
||||
// Create a test file without tokens
|
||||
const testFile = path.join(tempDir, "test.txt");
|
||||
fs.writeFileSync(
|
||||
testFile,
|
||||
"This is a test file without any sensitive data",
|
||||
);
|
||||
|
||||
await t.notThrowsAsync(
|
||||
async () => await scanArtifactsForTokens([testFile], logger),
|
||||
);
|
||||
} finally {
|
||||
// Clean up
|
||||
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
if (os.platform() !== "win32") {
|
||||
test("scanArtifactsForTokens finds token in debug artifacts", async (t) => {
|
||||
t.timeout(15000); // 15 seconds
|
||||
const messages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(messages, { logToConsole: false });
|
||||
// The zip here is a regression test based on
|
||||
// https://github.com/github/codeql-action/security/advisories/GHSA-vqf5-2xx6-9wfm
|
||||
const testZip = path.join(
|
||||
__dirname,
|
||||
"..",
|
||||
"src",
|
||||
"testdata",
|
||||
"debug-artifacts-with-fake-token.zip",
|
||||
);
|
||||
|
||||
// This zip file contains a nested structure with a fake token in:
|
||||
// my-db-java-partial.zip/trap/java/invocations/kotlin.9017231652989744319.trap
|
||||
const error = await t.throwsAsync(
|
||||
async () => await scanArtifactsForTokens([testZip], logger),
|
||||
);
|
||||
|
||||
t.regex(
|
||||
error?.message || "",
|
||||
/Found.*potential GitHub token/,
|
||||
"Should detect token in nested zip",
|
||||
);
|
||||
t.regex(
|
||||
error?.message || "",
|
||||
/kotlin\.9017231652989744319\.trap/,
|
||||
"Should report the .trap file containing the token",
|
||||
);
|
||||
|
||||
const logOutput = messages.map((msg) => msg.message).join("\n");
|
||||
t.regex(
|
||||
logOutput,
|
||||
/^Extracting gz file: .*\.gz$/m,
|
||||
"Logs should show that .gz files were extracted",
|
||||
);
|
||||
});
|
||||
}
|
||||
@@ -1,379 +0,0 @@
|
||||
import * as fs from "fs";
|
||||
import * as os from "os";
|
||||
import * as path from "path";
|
||||
|
||||
import * as exec from "@actions/exec";
|
||||
|
||||
import { Logger } from "./logging";
|
||||
import { getErrorMessage } from "./util";
|
||||
|
||||
/**
|
||||
* GitHub token patterns to scan for.
|
||||
* These patterns match various GitHub token formats.
|
||||
*/
|
||||
const GITHUB_TOKEN_PATTERNS = [
|
||||
{
|
||||
name: "Personal Access Token",
|
||||
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
|
||||
},
|
||||
{
|
||||
name: "OAuth Access Token",
|
||||
pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
|
||||
},
|
||||
{
|
||||
name: "User-to-Server Token",
|
||||
pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
|
||||
},
|
||||
{
|
||||
name: "Server-to-Server Token",
|
||||
pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
|
||||
},
|
||||
{
|
||||
name: "Refresh Token",
|
||||
pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
|
||||
},
|
||||
{
|
||||
name: "App Installation Access Token",
|
||||
pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
|
||||
},
|
||||
];
|
||||
|
||||
interface TokenFinding {
|
||||
tokenType: string;
|
||||
filePath: string;
|
||||
}
|
||||
|
||||
interface ScanResult {
|
||||
scannedFiles: number;
|
||||
findings: TokenFinding[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a file for GitHub tokens.
|
||||
*
|
||||
* @param filePath Path to the file to scan
|
||||
* @param relativePath Relative path for display purposes
|
||||
* @param logger Logger instance
|
||||
* @returns Array of token findings in the file
|
||||
*/
|
||||
function scanFileForTokens(
|
||||
filePath: string,
|
||||
relativePath: string,
|
||||
logger: Logger,
|
||||
): TokenFinding[] {
|
||||
const findings: TokenFinding[] = [];
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf8");
|
||||
|
||||
for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
|
||||
const matches = content.match(pattern);
|
||||
if (matches) {
|
||||
for (let i = 0; i < matches.length; i++) {
|
||||
findings.push({ tokenType: name, filePath: relativePath });
|
||||
}
|
||||
logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
return findings;
|
||||
} catch (e) {
|
||||
// If we can't read the file as text, it's likely binary or inaccessible
|
||||
logger.debug(
|
||||
`Could not scan file ${filePath} for tokens: ${getErrorMessage(e)}`,
|
||||
);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
|
||||
*
|
||||
* @param archivePath Path to the archive file
|
||||
* @param relativeArchivePath Relative path of the archive for display
|
||||
* @param extractDir Directory to extract to
|
||||
* @param logger Logger instance
|
||||
* @param depth Current recursion depth (to prevent infinite loops)
|
||||
* @returns Scan results
|
||||
*/
|
||||
async function scanArchiveFile(
|
||||
archivePath: string,
|
||||
relativeArchivePath: string,
|
||||
extractDir: string,
|
||||
logger: Logger,
|
||||
depth: number = 0,
|
||||
): Promise<ScanResult> {
|
||||
const MAX_DEPTH = 10; // Prevent infinite recursion
|
||||
if (depth > MAX_DEPTH) {
|
||||
throw new Error(
|
||||
`Maximum archive extraction depth (${MAX_DEPTH}) reached for ${archivePath}`,
|
||||
);
|
||||
}
|
||||
|
||||
const result: ScanResult = {
|
||||
scannedFiles: 0,
|
||||
findings: [],
|
||||
};
|
||||
|
||||
try {
|
||||
const tempExtractDir = fs.mkdtempSync(
|
||||
path.join(extractDir, `extract-${depth}-`),
|
||||
);
|
||||
|
||||
// Determine archive type and extract accordingly
|
||||
const fileName = path.basename(archivePath).toLowerCase();
|
||||
if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
|
||||
// Extract tar.gz files
|
||||
logger.debug(`Extracting tar.gz file: ${archivePath}`);
|
||||
await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], {
|
||||
silent: true,
|
||||
});
|
||||
} else if (fileName.endsWith(".tar.zst")) {
|
||||
// Extract tar.zst files
|
||||
logger.debug(`Extracting tar.zst file: ${archivePath}`);
|
||||
await exec.exec(
|
||||
"tar",
|
||||
["--zstd", "-xf", archivePath, "-C", tempExtractDir],
|
||||
{
|
||||
silent: true,
|
||||
},
|
||||
);
|
||||
} else if (fileName.endsWith(".zst")) {
|
||||
// Extract .zst files (single file compression)
|
||||
logger.debug(`Extracting zst file: ${archivePath}`);
|
||||
const outputFile = path.join(
|
||||
tempExtractDir,
|
||||
path.basename(archivePath, ".zst"),
|
||||
);
|
||||
await exec.exec("zstd", ["-d", archivePath, "-o", outputFile], {
|
||||
silent: true,
|
||||
});
|
||||
} else if (fileName.endsWith(".gz")) {
|
||||
// Extract .gz files (single file compression)
|
||||
logger.debug(`Extracting gz file: ${archivePath}`);
|
||||
const outputFile = path.join(
|
||||
tempExtractDir,
|
||||
path.basename(archivePath, ".gz"),
|
||||
);
|
||||
await exec.exec("gunzip", ["-c", archivePath], {
|
||||
outStream: fs.createWriteStream(outputFile),
|
||||
silent: true,
|
||||
});
|
||||
} else if (fileName.endsWith(".zip")) {
|
||||
// Extract zip files
|
||||
logger.debug(`Extracting zip file: ${archivePath}`);
|
||||
await exec.exec(
|
||||
"unzip",
|
||||
["-q", "-o", archivePath, "-d", tempExtractDir],
|
||||
{
|
||||
silent: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
// Scan the extracted contents
|
||||
const scanResult = await scanDirectory(
|
||||
tempExtractDir,
|
||||
relativeArchivePath,
|
||||
logger,
|
||||
depth + 1,
|
||||
);
|
||||
result.scannedFiles += scanResult.scannedFiles;
|
||||
result.findings.push(...scanResult.findings);
|
||||
|
||||
// Clean up extracted files
|
||||
fs.rmSync(tempExtractDir, { recursive: true, force: true });
|
||||
} catch (e) {
|
||||
logger.debug(
|
||||
`Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}`,
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a single file, including recursive archive extraction if applicable.
|
||||
*
|
||||
* @param fullPath Full path to the file
|
||||
* @param relativePath Relative path for display
|
||||
* @param extractDir Directory to use for extraction (for archive files)
|
||||
* @param logger Logger instance
|
||||
* @param depth Current recursion depth
|
||||
* @returns Scan results
|
||||
*/
|
||||
async function scanFile(
|
||||
fullPath: string,
|
||||
relativePath: string,
|
||||
extractDir: string,
|
||||
logger: Logger,
|
||||
depth: number = 0,
|
||||
): Promise<ScanResult> {
|
||||
const result: ScanResult = {
|
||||
scannedFiles: 1,
|
||||
findings: [],
|
||||
};
|
||||
|
||||
// Check if it's an archive file and recursively scan it
|
||||
const fileName = path.basename(fullPath).toLowerCase();
|
||||
const isArchive =
|
||||
fileName.endsWith(".zip") ||
|
||||
fileName.endsWith(".tar.gz") ||
|
||||
fileName.endsWith(".tgz") ||
|
||||
fileName.endsWith(".tar.zst") ||
|
||||
fileName.endsWith(".zst") ||
|
||||
fileName.endsWith(".gz");
|
||||
|
||||
if (isArchive) {
|
||||
const archiveResult = await scanArchiveFile(
|
||||
fullPath,
|
||||
relativePath,
|
||||
extractDir,
|
||||
logger,
|
||||
depth,
|
||||
);
|
||||
result.scannedFiles += archiveResult.scannedFiles;
|
||||
result.findings.push(...archiveResult.findings);
|
||||
}
|
||||
|
||||
// Scan the file itself for tokens (unless it's a pure binary archive format)
|
||||
const fileFindings = scanFileForTokens(fullPath, relativePath, logger);
|
||||
result.findings.push(...fileFindings);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively scans a directory for GitHub tokens.
|
||||
*
|
||||
* @param dirPath Directory path to scan
|
||||
* @param baseRelativePath Base relative path for computing display paths
|
||||
* @param logger Logger instance
|
||||
* @param depth Current recursion depth
|
||||
* @returns Scan results
|
||||
*/
|
||||
async function scanDirectory(
|
||||
dirPath: string,
|
||||
baseRelativePath: string,
|
||||
logger: Logger,
|
||||
depth: number = 0,
|
||||
): Promise<ScanResult> {
|
||||
const result: ScanResult = {
|
||||
scannedFiles: 0,
|
||||
findings: [],
|
||||
};
|
||||
|
||||
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dirPath, entry.name);
|
||||
const relativePath = path.join(baseRelativePath, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
const subResult = await scanDirectory(
|
||||
fullPath,
|
||||
relativePath,
|
||||
logger,
|
||||
depth,
|
||||
);
|
||||
result.scannedFiles += subResult.scannedFiles;
|
||||
result.findings.push(...subResult.findings);
|
||||
} else if (entry.isFile()) {
|
||||
const fileResult = await scanFile(
|
||||
fullPath,
|
||||
relativePath,
|
||||
path.dirname(fullPath),
|
||||
logger,
|
||||
depth,
|
||||
);
|
||||
result.scannedFiles += fileResult.scannedFiles;
|
||||
result.findings.push(...fileResult.findings);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans a list of files and directories for GitHub tokens.
|
||||
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
|
||||
*
|
||||
* @param filesToScan List of file paths to scan
|
||||
* @param logger Logger instance
|
||||
* @returns Scan results
|
||||
*/
|
||||
export async function scanArtifactsForTokens(
|
||||
filesToScan: string[],
|
||||
logger: Logger,
|
||||
): Promise<void> {
|
||||
logger.info(
|
||||
"Starting best-effort check for potential GitHub tokens in debug artifacts (for testing purposes only)...",
|
||||
);
|
||||
|
||||
const result: ScanResult = {
|
||||
scannedFiles: 0,
|
||||
findings: [],
|
||||
};
|
||||
|
||||
// Create a temporary directory for extraction
|
||||
const tempScanDir = fs.mkdtempSync(path.join(os.tmpdir(), "artifact-scan-"));
|
||||
|
||||
try {
|
||||
for (const filePath of filesToScan) {
|
||||
const stats = fs.statSync(filePath);
|
||||
const fileName = path.basename(filePath);
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
const dirResult = await scanDirectory(filePath, fileName, logger);
|
||||
result.scannedFiles += dirResult.scannedFiles;
|
||||
result.findings.push(...dirResult.findings);
|
||||
} else if (stats.isFile()) {
|
||||
const fileResult = await scanFile(
|
||||
filePath,
|
||||
fileName,
|
||||
tempScanDir,
|
||||
logger,
|
||||
);
|
||||
result.scannedFiles += fileResult.scannedFiles;
|
||||
result.findings.push(...fileResult.findings);
|
||||
}
|
||||
}
|
||||
|
||||
// Compute statistics from findings
|
||||
const tokenTypesCounts = new Map<string, number>();
|
||||
const filesWithTokens = new Set<string>();
|
||||
for (const finding of result.findings) {
|
||||
tokenTypesCounts.set(
|
||||
finding.tokenType,
|
||||
(tokenTypesCounts.get(finding.tokenType) || 0) + 1,
|
||||
);
|
||||
filesWithTokens.add(finding.filePath);
|
||||
}
|
||||
|
||||
const tokenTypesSummary = Array.from(tokenTypesCounts.entries())
|
||||
.map(([type, count]) => `${count} ${type}${count > 1 ? "s" : ""}`)
|
||||
.join(", ");
|
||||
|
||||
const baseSummary = `scanned ${result.scannedFiles} files, found ${result.findings.length} potential token(s) in ${filesWithTokens.size} file(s)`;
|
||||
const summaryWithTypes = tokenTypesSummary
|
||||
? `${baseSummary} (${tokenTypesSummary})`
|
||||
: baseSummary;
|
||||
|
||||
logger.info(`Artifact check complete: ${summaryWithTypes}`);
|
||||
|
||||
if (result.findings.length > 0) {
|
||||
const fileList = Array.from(filesWithTokens).join(", ");
|
||||
throw new Error(
|
||||
`Found ${result.findings.length} potential GitHub token(s) (${tokenTypesSummary}) in debug artifacts at: ${fileList}. This is a best-effort check for testing purposes only.`,
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
// Clean up temporary directory
|
||||
try {
|
||||
fs.rmSync(tempScanDir, { recursive: true, force: true });
|
||||
} catch (e) {
|
||||
logger.debug(
|
||||
`Could not clean up temporary scan directory: ${getErrorMessage(e)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,6 @@ import * as configUtils from "./config-utils";
|
||||
import * as errorMessages from "./error-messages";
|
||||
import { Feature } from "./feature-flags";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import { GitVersionInfo } from "./git-utils";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import {
|
||||
@@ -979,7 +978,6 @@ interface OverlayDatabaseModeTestSetup {
|
||||
languages: Language[];
|
||||
codeqlVersion: string;
|
||||
gitRoot: string | undefined;
|
||||
gitVersion: GitVersionInfo | undefined;
|
||||
codeScanningConfig: configUtils.UserConfig;
|
||||
diskUsage: DiskUsage | undefined;
|
||||
memoryFlagValue: number;
|
||||
@@ -994,10 +992,6 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
|
||||
languages: [KnownLanguage.javascript],
|
||||
codeqlVersion: CODEQL_OVERLAY_MINIMUM_VERSION,
|
||||
gitRoot: "/some/git/root",
|
||||
gitVersion: new GitVersionInfo(
|
||||
gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
|
||||
gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
|
||||
),
|
||||
codeScanningConfig: {},
|
||||
diskUsage: {
|
||||
numAvailableBytes: 50_000_000_000,
|
||||
@@ -1076,7 +1070,6 @@ const getOverlayDatabaseModeMacro = test.macro({
|
||||
setup.buildMode,
|
||||
undefined,
|
||||
setup.codeScanningConfig,
|
||||
setup.gitVersion,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -1780,32 +1773,6 @@ test(
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
getOverlayDatabaseModeMacro,
|
||||
"Fallback due to old git version",
|
||||
{
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
gitVersion: new GitVersionInfo("2.30.0", "2.30.0"), // Version below required 2.38.0
|
||||
},
|
||||
{
|
||||
overlayDatabaseMode: OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
},
|
||||
);
|
||||
|
||||
test(
|
||||
getOverlayDatabaseModeMacro,
|
||||
"Fallback when git version cannot be determined",
|
||||
{
|
||||
overlayDatabaseEnvVar: "overlay",
|
||||
gitVersion: undefined,
|
||||
},
|
||||
{
|
||||
overlayDatabaseMode: OverlayDatabaseMode.None,
|
||||
useOverlayDatabaseCaching: false,
|
||||
},
|
||||
);
|
||||
|
||||
// Exercise language-specific overlay analysis features code paths
|
||||
for (const language in KnownLanguage) {
|
||||
test(
|
||||
|
||||
@@ -22,19 +22,11 @@ import {
|
||||
parseUserConfig,
|
||||
UserConfig,
|
||||
} from "./config/db-config";
|
||||
import { addDiagnostic, makeTelemetryDiagnostic } from "./diagnostics";
|
||||
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import * as errorMessages from "./error-messages";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { RepositoryProperties } from "./feature-flags/properties";
|
||||
import {
|
||||
getGitRoot,
|
||||
getGitVersionOrThrow,
|
||||
GIT_MINIMUM_VERSION_FOR_OVERLAY,
|
||||
GitVersionInfo,
|
||||
isAnalyzingDefaultBranch,
|
||||
} from "./git-utils";
|
||||
import { getGitRoot, isAnalyzingDefaultBranch } from "./git-utils";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import {
|
||||
@@ -53,8 +45,6 @@ import {
|
||||
isDefined,
|
||||
checkDiskUsage,
|
||||
getCodeQLMemoryLimit,
|
||||
getErrorMessage,
|
||||
isInTestMode,
|
||||
} from "./util";
|
||||
|
||||
export * from "./config/db-config";
|
||||
@@ -719,7 +709,6 @@ export async function getOverlayDatabaseMode(
|
||||
buildMode: BuildMode | undefined,
|
||||
ramInput: string | undefined,
|
||||
codeScanningConfig: UserConfig,
|
||||
gitVersion: GitVersionInfo | undefined,
|
||||
logger: Logger,
|
||||
): Promise<{
|
||||
overlayDatabaseMode: OverlayDatabaseMode;
|
||||
@@ -822,22 +811,6 @@ export async function getOverlayDatabaseMode(
|
||||
);
|
||||
return nonOverlayAnalysis;
|
||||
}
|
||||
if (gitVersion === undefined) {
|
||||
logger.warning(
|
||||
`Cannot build an ${overlayDatabaseMode} database because ` +
|
||||
"the Git version could not be determined. " +
|
||||
"Falling back to creating a normal full database instead.",
|
||||
);
|
||||
return nonOverlayAnalysis;
|
||||
}
|
||||
if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
|
||||
logger.warning(
|
||||
`Cannot build an ${overlayDatabaseMode} database because ` +
|
||||
`the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. ` +
|
||||
"Falling back to creating a normal full database instead.",
|
||||
);
|
||||
return nonOverlayAnalysis;
|
||||
}
|
||||
|
||||
return {
|
||||
overlayDatabaseMode,
|
||||
@@ -930,24 +903,6 @@ export async function initConfig(
|
||||
config.computedConfig["query-filters"] = [];
|
||||
}
|
||||
|
||||
let gitVersion: GitVersionInfo | undefined = undefined;
|
||||
try {
|
||||
gitVersion = await getGitVersionOrThrow();
|
||||
logger.info(`Using Git version ${gitVersion.fullVersion}`);
|
||||
await logGitVersionTelemetry(config, gitVersion);
|
||||
} catch (e) {
|
||||
logger.warning(`Could not determine Git version: ${getErrorMessage(e)}`);
|
||||
// Throw the error in test mode so it's more visible, unless the environment
|
||||
// variable is set to tolerate this, for example because we're running in a
|
||||
// Docker container where git may not be available.
|
||||
if (
|
||||
isInTestMode() &&
|
||||
process.env[EnvVar.TOLERATE_MISSING_GIT_VERSION] !== "true"
|
||||
) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
// The choice of overlay database mode depends on the selection of languages
|
||||
// and queries, which in turn depends on the user config and the augmentation
|
||||
// properties. So we need to calculate the overlay database mode after the
|
||||
@@ -961,7 +916,6 @@ export async function initConfig(
|
||||
config.buildMode,
|
||||
inputs.ramInput,
|
||||
config.computedConfig,
|
||||
gitVersion,
|
||||
logger,
|
||||
);
|
||||
logger.info(
|
||||
@@ -1362,26 +1316,3 @@ export function getPrimaryAnalysisConfig(config: Config): AnalysisConfig {
|
||||
? CodeScanning
|
||||
: CodeQuality;
|
||||
}
|
||||
|
||||
/** Logs the Git version as a telemetry diagnostic. */
|
||||
async function logGitVersionTelemetry(
|
||||
config: Config,
|
||||
gitVersion: GitVersionInfo,
|
||||
): Promise<void> {
|
||||
if (config.languages.length > 0) {
|
||||
addDiagnostic(
|
||||
config,
|
||||
// Arbitrarily choose the first language. We could also choose all languages, but that
|
||||
// increases the risk of misinterpreting the data.
|
||||
config.languages[0],
|
||||
makeTelemetryDiagnostic(
|
||||
"codeql-action/git-version-telemetry",
|
||||
"Git version telemetry",
|
||||
{
|
||||
fullVersion: gitVersion.fullVersion,
|
||||
truncatedVersion: gitVersion.truncatedVersion,
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -231,7 +231,7 @@ test("Don't crash if uploading a database fails", async (t) => {
(v) =>
v.type === "warning" &&
v.message ===
"Failed to upload database for javascript: some error message",
"Failed to upload database for javascript: Error: some error message",
) !== undefined,
);
});

@@ -13,20 +13,6 @@ import { RepositoryNwo } from "./repository";
import * as util from "./util";
import { bundleDb, CleanupLevel, parseGitHubUrl } from "./util";

/** Information about a database upload. */
export interface DatabaseUploadResult {
/** Language of the database. */
language: string;
/** Size of the zipped database in bytes. */
zipped_upload_size_bytes?: number;
/** Whether the uploaded database is an overlay base. */
is_overlay_base?: boolean;
/** Time taken to upload database in milliseconds. */
upload_duration_ms?: number;
/** If there was an error during database upload, this is its message. */
error?: string;
}
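For illustration only (not part of the diff): a sketch of the report objects this interface describes, with hypothetical values.

// Illustrative sketch - hypothetical values, not taken from the diff.
const successReport: DatabaseUploadResult = {
  language: "javascript",
  zipped_upload_size_bytes: 52428800,
  is_overlay_base: false,
  upload_duration_ms: 3150,
};
const failureReport: DatabaseUploadResult = {
  language: "javascript",
  error: "some error message",
};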
export async function cleanupAndUploadDatabases(
repositoryNwo: RepositoryNwo,
codeql: CodeQL,
@@ -34,46 +20,44 @@ export async function cleanupAndUploadDatabases(
apiDetails: GitHubApiDetails,
features: FeatureEnablement,
logger: Logger,
): Promise<DatabaseUploadResult[]> {
): Promise<void> {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
return [];
return;
}

if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
logger.debug(
`Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
);
return [];
return;
}

if (util.isInTestMode()) {
logger.debug("In test mode. Skipping database upload.");
return [];
return;
}

// Do nothing when not running against github.com
if (
config.gitHubVersion.type !== util.GitHubVariant.DOTCOM &&
config.gitHubVersion.type !== util.GitHubVariant.GHEC_DR
config.gitHubVersion.type !== util.GitHubVariant.GHE_DOTCOM
) {
logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
return [];
return;
}

if (!(await gitUtils.isAnalyzingDefaultBranch())) {
// We only want to upload a database if we are analyzing the default branch.
logger.debug("Not analyzing default branch. Skipping upload.");
return [];
return;
}

// If config.overlayDatabaseMode is OverlayBase, then we have overlay base databases for all languages.
const shouldUploadOverlayBase =
const cleanupLevel =
config.overlayDatabaseMode === OverlayDatabaseMode.OverlayBase &&
(await features.getValue(Feature.UploadOverlayDbToApi));
const cleanupLevel = shouldUploadOverlayBase
? CleanupLevel.Overlay
: CleanupLevel.Clear;
(await features.getValue(Feature.UploadOverlayDbToApi))
? CleanupLevel.Overlay
: CleanupLevel.Clear;

// Clean up the database, since intermediate results may still be written to the
// database if there is high RAM pressure.
@@ -93,22 +77,19 @@ export async function cleanupAndUploadDatabases(
uploadsBaseUrl = uploadsBaseUrl.slice(0, -1);
}

const reports: DatabaseUploadResult[] = [];
for (const language of config.languages) {
let bundledDbSize: number | undefined = undefined;
try {
// Upload the database bundle.
// Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents is acceptable.
// This API method is available to anyone with write access to the repo.
const bundledDb = await bundleDb(config, language, codeql, language);
bundledDbSize = fs.statSync(bundledDb).size;
const bundledDbSize = fs.statSync(bundledDb).size;
const bundledDbReadStream = fs.createReadStream(bundledDb);
const commitOid = await gitUtils.getCommitOid(
actionsUtil.getRequiredInput("checkout_path"),
);
try {
const startTime = performance.now();
await client.request(
`POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`,
{
@@ -126,30 +107,13 @@ export async function cleanupAndUploadDatabases(
},
},
);
const endTime = performance.now();
reports.push({
language,
zipped_upload_size_bytes: bundledDbSize,
is_overlay_base: shouldUploadOverlayBase,
upload_duration_ms: endTime - startTime,
});
logger.debug(`Successfully uploaded database for ${language}`);
} finally {
bundledDbReadStream.close();
}
} catch (e) {
// Log a warning but don't fail the workflow
logger.warning(
`Failed to upload database for ${language}: ${util.getErrorMessage(e)}`,
);
reports.push({
language,
error: util.getErrorMessage(e),
...(bundledDbSize !== undefined
? { zipped_upload_size_bytes: bundledDbSize }
: {}),
});
logger.warning(`Failed to upload database for ${language}: ${e}`);
}
}
return reports;
}
@@ -8,7 +8,6 @@ import archiver from "archiver";

import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
import { dbIsFinalized } from "./analyze";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { type CodeQL } from "./codeql";
import { Config } from "./config-utils";
import { EnvVar } from "./environment";
@@ -24,7 +23,6 @@ import {
getCodeQLDatabasePath,
getErrorMessage,
GitHubVariant,
isInTestMode,
listFolder,
} from "./util";

@@ -271,14 +269,6 @@ export async function uploadDebugArtifacts(
return "upload-not-supported";
}

// When running in test mode, perform a best effort scan of the debug artifacts. The artifact
// scanner is basic and not reliable or fast enough for production use, but it can help catch
// some issues early.
if (isInTestMode()) {
await scanArtifactsForTokens(toUpload, logger);
core.exportVariable("CODEQL_ACTION_ARTIFACT_SCAN_FINISHED", "true");
}

let suffix = "";
const matrix = getOptionalInput("matrix");
if (matrix) {
@@ -603,6 +603,28 @@ test("getFeaturePrefix - returns empty string if no features are enabled", async
}
});

test("getFeaturePrefix - Java - returns 'minify-' if JavaMinimizeDependencyJars is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.JavaMinimizeDependencyJars]);

const result = await getFeaturePrefix(codeql, features, KnownLanguage.java);
t.deepEqual(result, "minify-");
});

test("getFeaturePrefix - non-Java - returns '' if JavaMinimizeDependencyJars is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.JavaMinimizeDependencyJars]);

for (const knownLanguage of Object.values(KnownLanguage)) {
// Skip Java since we expect a result for it, which is tested in the previous test.
if (knownLanguage === KnownLanguage.java) {
continue;
}
const result = await getFeaturePrefix(codeql, features, knownLanguage);
t.deepEqual(result, "", `Expected no feature prefix for ${knownLanguage}`);
}
});

test("getFeaturePrefix - C# - returns prefix if CsharpNewCacheKey is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);

@@ -541,7 +541,18 @@ export async function getFeaturePrefix(
}
};

if (language === KnownLanguage.csharp) {
if (language === KnownLanguage.java) {
// To ensure a safe rollout of JAR minimization, we change the key when the feature is enabled.
const minimizeJavaJars = await features.getValue(
Feature.JavaMinimizeDependencyJars,
codeql,
);

// To maintain backwards compatibility with this, we return "minify-" instead of a hash.
if (minimizeJavaJars) {
return "minify-";
}
} else if (language === KnownLanguage.csharp) {
await addFeatureIfEnabled(Feature.CsharpNewCacheKey);
await addFeatureIfEnabled(Feature.CsharpCacheBuildModeNone);
}
@@ -582,8 +593,14 @@ async function cachePrefix(
// experimental features that affect the cache contents.
const featurePrefix = await getFeaturePrefix(codeql, features, language);

// Assemble the cache key.
return `${prefix}-${featurePrefix}${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
// Assemble the cache key. For backwards compatibility with the JAR minification experiment's existing
// feature prefix usage, we add that feature prefix at the start. Other feature prefixes are inserted
// after the general CodeQL dependency cache prefix.
if (featurePrefix === "minify-") {
return `${featurePrefix}${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
} else {
return `${prefix}-${featurePrefix}${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
}
}

/** Represents information about our overall cache usage for CodeQL dependency caches. */

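For illustration only (not part of the diff): how the two branches of cachePrefix above compose a key. The general prefix and CODEQL_DEPENDENCY_CACHE_VERSION values are not shown in this diff, so the values below are hypothetical placeholders.

// Sketch with hypothetical values ("codeql-dependencies" and "2" stand in for the
// real prefix and CODEQL_DEPENDENCY_CACHE_VERSION).
const examplePrefix = "codeql-dependencies";
const exampleVersion = "2";
// featurePrefix === "minify-" puts the feature prefix first:
console.log(`minify-${examplePrefix}-${exampleVersion}-Linux-java-`);
// any other featurePrefix (here empty) stays after the general prefix:
console.log(`${examplePrefix}-${exampleVersion}-Linux-java-`);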
@@ -185,27 +185,3 @@ export function flushDiagnostics(config: Config) {
// Reset the unwritten diagnostics array.
unwrittenDiagnostics = [];
}

/**
* Creates a telemetry-only diagnostic message. This is a convenience function
* for creating diagnostics that should only be sent to telemetry and not
* displayed on the status page or CLI summary table.
*
* @param id An identifier under which it makes sense to group this diagnostic message
* @param name Display name
* @param attributes Structured metadata
*/
export function makeTelemetryDiagnostic(
id: string,
name: string,
attributes: { [key: string]: any },
): DiagnosticMessage {
return makeDiagnostic(id, name, {
attributes,
visibility: {
cliSummaryTable: false,
statusPage: false,
telemetry: true,
},
});
}

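A minimal usage sketch of the helper above, mirroring the git-version call site earlier in this diff; the version strings are example values taken from the tests.

// Telemetry-only diagnostic: hidden from the CLI summary table and status page.
const diagnostic = makeTelemetryDiagnostic(
  "codeql-action/git-version-telemetry",
  "Git version telemetry",
  { fullVersion: "2.40.0.windows.1", truncatedVersion: "2.40.0" },
);
addDiagnostic(config, config.languages[0], diagnostic);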
@@ -129,10 +129,4 @@ export enum EnvVar {
* the workflow is valid and validation is not necessary.
*/
SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",

/**
* Whether to tolerate failure to determine the git version (only applicable in test mode).
* Intended for use in environments where git may not be installed, such as Docker containers.
*/
TOLERATE_MISSING_GIT_VERSION = "CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION",
}

@@ -62,13 +62,13 @@ test(`All features are disabled if running against GHES`, async (t) => {
});
});

test(`Feature flags are requested in GHEC-DR`, async (t) => {
test(`Feature flags are requested in Proxima`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
{ type: GitHubVariant.GHEC_DR },
{ type: GitHubVariant.GHE_DOTCOM },
);

mockFeatureFlagApiEndpoint(200, initializeFeatures(true));
@@ -436,79 +436,97 @@ test(`selects CLI from defaults.json on GHES`, async (t) => {
});
});

for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) {
test(`selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] =
false;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test("selects CLI v2.20.1 on Dotcom when feature flags enable v2.20.0 and v2.20.1", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] = false;
expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] = false;
expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] = false;
expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] = false;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);

const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});
const defaultCliVersion = await features.getDefaultCliVersion(
GitHubVariant.DOTCOM,
);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});
});
});

test(`selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test("includes tag name", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);

const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
toolsFeatureFlagsValid: false,
});
const defaultCliVersion = await features.getDefaultCliVersion(
GitHubVariant.DOTCOM,
);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.0",
tagName: "codeql-bundle-v2.20.0",
toolsFeatureFlagsValid: true,
});
});
});

test(`ignores invalid version numbers in default version feature flags on ${variant}`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_invalid_enabled"] =
true;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test(`selects CLI from defaults.json on Dotcom when no default version feature flags are enabled`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);

const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});

t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "warning" &&
v.message ===
"Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.",
) !== undefined,
);
const defaultCliVersion = await features.getDefaultCliVersion(
GitHubVariant.DOTCOM,
);
t.deepEqual(defaultCliVersion, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
toolsFeatureFlagsValid: false,
});
});
}
});

test("ignores invalid version numbers in default version feature flags", async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_invalid_enabled"] =
true;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);

const defaultCliVersion = await features.getDefaultCliVersion(
GitHubVariant.DOTCOM,
);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});

t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "warning" &&
v.message ===
"Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.",
) !== undefined,
);
});
});

test("legacy feature flags should end with _enabled", async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
@@ -44,6 +44,7 @@ export interface FeatureEnablement {
*/
export enum Feature {
AllowToolcacheInput = "allow_toolcache_input",
AnalyzeUseNewUpload = "analyze_use_new_upload",
CleanupTrapCaches = "cleanup_trap_caches",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
CsharpCacheBuildModeNone = "csharp_cache_bmn",
@@ -53,6 +54,7 @@ export enum Feature {
DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
JavaMinimizeDependencyJars = "java_minimize_dependency_jars",
OverlayAnalysis = "overlay_analysis",
OverlayAnalysisActions = "overlay_analysis_actions",
OverlayAnalysisCodeScanningActions = "overlay_analysis_code_scanning_actions",
@@ -118,6 +120,11 @@ export const featureConfig: Record<
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: undefined,
},
[Feature.AnalyzeUseNewUpload]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
minimumVersion: undefined,
},
[Feature.CleanupTrapCaches]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -167,6 +174,11 @@ export const featureConfig: Record<
legacyApi: true,
minimumVersion: undefined,
},
[Feature.JavaMinimizeDependencyJars]: {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0",
},
[Feature.OverlayAnalysis]: {
defaultValue: false,
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",
@@ -293,7 +305,6 @@ export const featureConfig: Record<
defaultValue: false,
envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API",
minimumVersion: undefined,
toolsFeature: ToolsFeature.BundleSupportsOverlay,
},
[Feature.UseRepositoryProperties]: {
defaultValue: false,
@@ -487,8 +498,8 @@ class GitHubFeatureFlags {
async getDefaultCliVersion(
variant: util.GitHubVariant,
): Promise<CodeQLDefaultVersionInfo> {
if (supportsFeatureFlags(variant)) {
return await this.getDefaultCliVersionFromFlags();
if (variant === util.GitHubVariant.DOTCOM) {
return await this.getDefaultDotcomCliVersion();
}
return {
cliVersion: defaults.cliVersion,
@@ -496,7 +507,7 @@ class GitHubFeatureFlags {
};
}

async getDefaultCliVersionFromFlags(): Promise<CodeQLDefaultVersionInfo> {
async getDefaultDotcomCliVersion(): Promise<CodeQLDefaultVersionInfo> {
const response = await this.getAllFeatures();

const enabledFeatureFlagCliVersions = Object.entries(response)
@@ -622,7 +633,10 @@ class GitHubFeatureFlags {

private async loadApiResponse(): Promise<GitHubFeatureFlagsApiResponse> {
// Do nothing when not running against github.com
if (!supportsFeatureFlags(this.gitHubVersion.type)) {
if (
this.gitHubVersion.type !== util.GitHubVariant.DOTCOM &&
this.gitHubVersion.type !== util.GitHubVariant.GHE_DOTCOM
) {
this.logger.debug(
"Not running against github.com. Disabling all toggleable features.",
);
@@ -688,10 +702,3 @@ class GitHubFeatureFlags {
}
}
}

function supportsFeatureFlags(githubVariant: util.GitHubVariant): boolean {
return (
githubVariant === util.GitHubVariant.DOTCOM ||
githubVariant === util.GitHubVariant.GHEC_DR
);
}

@@ -1,5 +1,4 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";

import * as core from "@actions/core";
@@ -316,23 +315,27 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => {
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
);

const result = await gitUtils.getFileOidsUnderPath("/fake/path");
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");

t.deepEqual(result, {
"lib/git-utils.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/git-utils.js.map": "d89514599a9a99f22b4085766d40af7b99974827",
"src/git-utils.ts": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});
t.deepEqual(result, {
"lib/git-utils.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/git-utils.js.map": "d89514599a9a99f22b4085766d40af7b99974827",
"src/git-utils.ts": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});

t.deepEqual(runGitCommandStub.firstCall.args, [
"/fake/path",
["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
"Cannot list Git OIDs of tracked files.",
]);
t.deepEqual(runGitCommandStub.firstCall.args, [
"/fake/path",
["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
"Cannot list Git OIDs of tracked files.",
]);
} finally {
runGitCommandStub.restore();
}
});

test("getFileOidsUnderPath handles quoted paths", async (t) => {
sinon
const runGitCommandStub = sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
"30d998ded095371488be3a729eb61d86ed721a18_lib/normal-file.js\n" +
@@ -340,24 +343,34 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => {
'a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_"lib/file\\twith\\ttabs.js"',
);

const result = await gitUtils.getFileOidsUnderPath("/fake/path");
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");

t.deepEqual(result, {
"lib/normal-file.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
"lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});
t.deepEqual(result, {
"lib/normal-file.js": "30d998ded095371488be3a729eb61d86ed721a18",
"lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
"lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
});
} finally {
runGitCommandStub.restore();
}
});

test("getFileOidsUnderPath handles empty output", async (t) => {
sinon.stub(gitUtils as any, "runGitCommand").resolves("");
const runGitCommandStub = sinon
.stub(gitUtils as any, "runGitCommand")
.resolves("");

const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {});
try {
const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {});
} finally {
runGitCommandStub.restore();
}
});

test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
sinon
const runGitCommandStub = sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
"30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
@@ -365,73 +378,17 @@ test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
);

await t.throwsAsync(
async () => {
await gitUtils.getFileOidsUnderPath("/fake/path");
},
{
instanceOf: Error,
message: 'Unexpected "git ls-files" output: invalid-line-format',
},
);
});

test("getGitVersionOrThrow returns version for valid git output", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(`git version 2.40.0${os.EOL}`);

const version = await gitUtils.getGitVersionOrThrow();
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0");
});

test("getGitVersionOrThrow throws for invalid git output", async (t) => {
sinon.stub(gitUtils as any, "runGitCommand").resolves("invalid output");

await t.throwsAsync(
async () => {
await gitUtils.getGitVersionOrThrow();
},
{
instanceOf: Error,
message: "Could not parse Git version from output: invalid output",
},
);
});

test("getGitVersionOrThrow handles Windows-style git output", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves("git version 2.40.0.windows.1");

const version = await gitUtils.getGitVersionOrThrow();
// The truncated version should contain just the major.minor.patch portion
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0.windows.1");
});

test("getGitVersionOrThrow throws when git command fails", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.rejects(new Error("git not found"));

await t.throwsAsync(
async () => {
await gitUtils.getGitVersionOrThrow();
},
{
instanceOf: Error,
message: "git not found",
},
);
});

test("GitVersionInfo.isAtLeast correctly compares versions", async (t) => {
const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0");

t.true(version.isAtLeast("2.38.0"));
t.true(version.isAtLeast("2.40.0"));
t.false(version.isAtLeast("2.41.0"));
t.false(version.isAtLeast("3.0.0"));
try {
await t.throwsAsync(
async () => {
await gitUtils.getFileOidsUnderPath("/fake/path");
},
{
instanceOf: Error,
message: 'Unexpected "git ls-files" output: invalid-line-format',
},
);
} finally {
runGitCommandStub.restore();
}
});

@@ -1,7 +1,6 @@
import * as core from "@actions/core";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import * as semver from "semver";

import {
getOptionalInput,
@@ -10,52 +9,6 @@ import {
} from "./actions-util";
import { ConfigurationError, getRequiredEnvParam } from "./util";

/**
* Minimum Git version required for overlay analysis. The `git ls-files --format`
* option, which is used by `getFileOidsUnderPath`, was introduced in Git 2.38.0.
*/
export const GIT_MINIMUM_VERSION_FOR_OVERLAY = "2.38.0";

/**
* Git version information
*
* The full version string as reported by `git --version` may not be
* semver-compatible (e.g., "2.40.0.windows.1"). This class captures both
* the full version string and a truncated semver-compatible version string
* (e.g., "2.40.0").
*/
export class GitVersionInfo {
constructor(
/** Truncated semver-compatible version */
public truncatedVersion: string,
/** Full version string as reported by `git --version` */
public fullVersion: string,
) {}

isAtLeast(minVersion: string): boolean {
return semver.gte(this.truncatedVersion, minVersion);
}
}

/**
* Gets the version of Git installed on the system and throws an error if
* the version cannot be determined.
*/
export async function getGitVersionOrThrow(): Promise<GitVersionInfo> {
const stdout = await runGitCommand(
undefined,
["--version"],
"Failed to get git version.",
);
// Git version output can vary: "git version 2.40.0" or "git version 2.40.0.windows.1"
// We capture just the major.minor.patch portion to ensure semver compatibility.
const match = stdout.trim().match(/^git version ((\d+\.\d+\.\d+).*)$/);
if (match?.[1] && match?.[2]) {
return new GitVersionInfo(match[2], match[1]);
}
throw new Error(`Could not parse Git version from output: ${stdout.trim()}`);
}

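A minimal usage sketch of the helpers above, mirroring how the init code in this diff gates overlay analysis on the installed Git version:

// Sketch: "2.40.0.windows.1" is truncated to "2.40.0" for the semver comparison.
const installedGitVersion = await getGitVersionOrThrow();
if (!installedGitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
  // Fall back to building a normal full database instead of an overlay database.
}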
export const runGitCommand = async function (
workingDirectory: string | undefined,
args: string[],

@@ -33,7 +33,6 @@ import {
flushDiagnostics,
logUnwrittenDiagnostics,
makeDiagnostic,
makeTelemetryDiagnostic,
} from "./diagnostics";
import { EnvVar } from "./environment";
import { Feature, Features } from "./feature-flags";
@@ -89,13 +88,6 @@ import {
} from "./util";
import { checkWorkflow } from "./workflow";

/**
* First version of CodeQL where the Java extractor safely supports the option to minimize
* dependency jars. Note: some earlier versions of the extractor will respond to the corresponding
* option, but may rewrite jars in ways that lead to extraction errors.
*/
export const CODEQL_VERSION_JAR_MINIMIZATION = "2.23.0";

/**
* Sends a status report indicating that the `init` Action is starting.
*
@@ -426,10 +418,17 @@ async function run() {
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
makeTelemetryDiagnostic(
makeDiagnostic(
"codeql-action/bundle-download-telemetry",
"CodeQL bundle download telemetry",
toolsDownloadStatusReport,
{
attributes: toolsDownloadStatusReport,
visibility: {
cliSummaryTable: false,
statusPage: false,
telemetry: true,
},
},
),
);
}
@@ -639,20 +638,18 @@ async function run() {
}
}

// If we are doing a Java `build-mode: none` analysis, then set the environment variable that
// enables the option in the Java extractor to minimize dependency jars. We also only do this if
// dependency caching is enabled, since the option is intended to reduce the size of dependency
// caches, but the jar-rewriting does have a performance cost that we'd like to avoid when
// caching is not being used.
// TODO: Remove this language-specific mechanism and replace it with a more general one that
// tells extractors when dependency caching is enabled, and then the Java extractor can make its
// own decision about whether to rewrite jars.
// If the feature flag to minimize Java dependency jars is enabled, and we are doing a Java
// `build-mode: none` analysis (i.e. the flag is relevant), then set the environment variable
// that enables the corresponding option in the Java extractor. We also only do this if
// dependency caching is enabled, since the option is intended to reduce the size of
// dependency caches, but the jar-rewriting does have a performance cost that we'd like to avoid
// when caching is not being used.
if (process.env[EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS]) {
logger.debug(
`${EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS} is already set to '${process.env[EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS]}', so the Action will not override it.`,
);
} else if (
(await codeQlVersionAtLeast(codeql, CODEQL_VERSION_JAR_MINIMIZATION)) &&
(await features.getValue(Feature.JavaMinimizeDependencyJars, codeql)) &&
config.dependencyCachingEnabled &&
config.buildMode === BuildMode.None &&
config.languages.includes(KnownLanguage.java)
@@ -788,10 +785,17 @@ async function recordZstdAvailability(
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
makeTelemetryDiagnostic(
makeDiagnostic(
"codeql-action/zstd-availability",
"Zstandard availability",
zstdAvailability,
{
attributes: zstdAvailability,
visibility: {
cliSummaryTable: false,
statusPage: false,
telemetry: true,
},
},
),
);
}

@@ -511,7 +511,7 @@ export async function getCodeQLSource(
// different version to save download time if the version hasn't been
// specified explicitly (in which case we always honor it).
if (
variant === util.GitHubVariant.GHES &&
variant !== util.GitHubVariant.DOTCOM &&
!forceShippedTools &&
!toolsInput
) {

BIN src/testdata/debug-artifacts-with-fake-token.zip vendored
Binary file not shown.
@@ -152,38 +152,27 @@ export interface LoggedMessage {
message: string | Error;
}

export function getRecordingLogger(
messages: LoggedMessage[],
{ logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
): Logger {
export function getRecordingLogger(messages: LoggedMessage[]): Logger {
return {
debug: (message: string) => {
messages.push({ type: "debug", message });
if (logToConsole) {
// eslint-disable-next-line no-console
console.debug(message);
}
// eslint-disable-next-line no-console
console.debug(message);
},
info: (message: string) => {
messages.push({ type: "info", message });
if (logToConsole) {
// eslint-disable-next-line no-console
console.info(message);
}
// eslint-disable-next-line no-console
console.info(message);
},
warning: (message: string | Error) => {
messages.push({ type: "warning", message });
if (logToConsole) {
// eslint-disable-next-line no-console
console.warn(message);
}
// eslint-disable-next-line no-console
console.warn(message);
},
error: (message: string | Error) => {
messages.push({ type: "error", message });
if (logToConsole) {
// eslint-disable-next-line no-console
console.error(message);
}
// eslint-disable-next-line no-console
console.error(message);
},
isDebug: () => true,
startGroup: () => undefined,

@@ -4,7 +4,6 @@ import type { VersionInfo } from "./codeql";

export enum ToolsFeature {
BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
BundleSupportsOverlay = "bundleSupportsOverlay",
DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
ForceOverwrite = "forceOverwrite",
IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",

@@ -433,8 +433,8 @@ function formatGitHubVersion(version: util.GitHubVersion): string {
switch (version.type) {
case util.GitHubVariant.DOTCOM:
return "dotcom";
case util.GitHubVariant.GHEC_DR:
return "GHEC-DR";
case util.GitHubVariant.GHE_DOTCOM:
return "GHE dotcom";
case util.GitHubVariant.GHES:
return `GHES ${version.version}`;
default:
@@ -445,12 +445,12 @@ function formatGitHubVersion(version: util.GitHubVersion): string {
const CHECK_ACTION_VERSION_TESTS: Array<[string, util.GitHubVersion, boolean]> =
[
["2.2.1", { type: util.GitHubVariant.DOTCOM }, true],
["2.2.1", { type: util.GitHubVariant.GHEC_DR }, true],
["2.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, true],
["2.2.1", { type: util.GitHubVariant.GHES, version: "3.10" }, false],
["2.2.1", { type: util.GitHubVariant.GHES, version: "3.11" }, false],
["2.2.1", { type: util.GitHubVariant.GHES, version: "3.12" }, false],
["3.2.1", { type: util.GitHubVariant.DOTCOM }, true],
["3.2.1", { type: util.GitHubVariant.GHEC_DR }, true],
["3.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, true],
["3.2.1", { type: util.GitHubVariant.GHES, version: "3.10" }, false],
["3.2.1", { type: util.GitHubVariant.GHES, version: "3.11" }, false],
["3.2.1", { type: util.GitHubVariant.GHES, version: "3.12" }, false],
@@ -458,7 +458,7 @@ const CHECK_ACTION_VERSION_TESTS: Array<[string, util.GitHubVersion, boolean]> =
["3.2.1", { type: util.GitHubVariant.GHES, version: "3.20" }, true],
["3.2.1", { type: util.GitHubVariant.GHES, version: "3.21" }, true],
["4.2.1", { type: util.GitHubVariant.DOTCOM }, false],
["4.2.1", { type: util.GitHubVariant.GHEC_DR }, false],
["4.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, false],
["4.2.1", { type: util.GitHubVariant.GHES, version: "3.19" }, false],
["4.2.1", { type: util.GitHubVariant.GHES, version: "3.20" }, false],
["4.2.1", { type: util.GitHubVariant.GHES, version: "3.21" }, false],

14 src/util.ts
@@ -556,17 +556,13 @@ const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR =
let hasBeenWarnedAboutVersion = false;

export enum GitHubVariant {
/** [GitHub.com](https://github.com) */
DOTCOM = "GitHub.com",
/** [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@latest/admin/overview/about-github-enterprise-server) */
GHES = "GitHub Enterprise Server",
/** [GitHub Enterprise Cloud with data residency](https://docs.github.com/en/enterprise-cloud@latest/admin/data-residency/about-github-enterprise-cloud-with-data-residency) */
GHEC_DR = "GitHub Enterprise Cloud with data residency",
DOTCOM,
GHES,
GHE_DOTCOM,
}

export type GitHubVersion =
| { type: GitHubVariant.DOTCOM }
| { type: GitHubVariant.GHEC_DR }
| { type: GitHubVariant.GHE_DOTCOM }
| { type: GitHubVariant.GHES; version: string };

export function checkGitHubVersionInRange(
@@ -1109,7 +1105,7 @@ export function checkActionVersion(
// and should update to CodeQL Action v4.
if (
githubVersion.type === GitHubVariant.DOTCOM ||
githubVersion.type === GitHubVariant.GHEC_DR ||
githubVersion.type === GitHubVariant.GHE_DOTCOM ||
(githubVersion.type === GitHubVariant.GHES &&
semver.satisfies(
semver.coerce(githubVersion.version) ?? "0.0.0",

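For illustration only (not part of the diff): the GitHubVersion union above is consumed by narrowing on type, since only the GHES member carries a version string; describeVersion is a hypothetical name, not a function from the repository.

// Sketch: only GitHubVariant.GHES carries a version field.
function describeVersion(v: GitHubVersion): string {
  return v.type === GitHubVariant.GHES ? `GHES ${v.version}` : "github.com-style";
}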
@@ -29,6 +29,6 @@ outputs:
proxy_urls:
description: A stringified JSON array of objects containing the types and URLs of the configured registries.
runs:
using: node24
using: node20
main: "../lib/start-proxy-action.js"
post: "../lib/start-proxy-action-post.js"

@@ -41,6 +41,6 @@ outputs:

{ "code-scanning": "some-id", "code-quality": "some-other-id" }
runs:
using: node24
using: node20
main: '../lib/upload-sarif-action.js'
post: '../lib/upload-sarif-action-post.js'