Compare commits


65 Commits

Author SHA1 Message Date
Henry Mercer
bffd034ab1 Merge pull request #3346 from github/backport-v3.31.7-cf1bb45a2
Merge releases/v4 into releases/v3
2025-12-09 13:26:37 +00:00
github-actions[bot]
817dbfb39b Rebuild 2025-12-05 20:45:51 +00:00
github-actions[bot]
793f7006bb Update version and changelog for v3.31.7 2025-12-05 17:21:20 +00:00
github-actions[bot]
d2e9832330 Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.7-cf1bb45a2 2025-12-05 17:21:19 +00:00
github-actions[bot]
c2e4b7785f Revert "Rebuild"
This reverts commit 89cb79a131.
2025-12-05 17:21:18 +00:00
github-actions[bot]
66d7f51a10 Revert "Update version and changelog for v3.31.6"
This reverts commit dbf6819ebd.
2025-12-05 17:21:18 +00:00
Michael B. Gale
497990dfed Merge pull request #3338 from github/backport-v3.31.6-fe4161a26
Merge releases/v4 into releases/v3
2025-12-01 10:24:24 +00:00
github-actions[bot]
89cb79a131 Rebuild 2025-12-01 09:59:32 +00:00
github-actions[bot]
dbf6819ebd Update version and changelog for v3.31.6 2025-12-01 09:52:51 +00:00
github-actions[bot]
5af51f4048 Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.6-fe4161a26 2025-12-01 09:52:49 +00:00
github-actions[bot]
e439418aab Revert "Rebuild"
This reverts commit c12d7c1f2d.
2025-12-01 09:52:49 +00:00
github-actions[bot]
249860e323 Revert "Update version and changelog for v3.31.5"
This reverts commit 2e2a1cf1ef.
2025-12-01 09:52:49 +00:00
Paolo Tranquilli
d3ced5c96c Merge pull request #3324 from github/backport-v3.31.5-fdbfb4d27
Merge releases/v4 into releases/v3
2025-11-24 12:16:57 +01:00
github-actions[bot]
c12d7c1f2d Rebuild 2025-11-24 10:56:57 +00:00
github-actions[bot]
2e2a1cf1ef Update version and changelog for v3.31.5 2025-11-24 09:33:54 +00:00
github-actions[bot]
e2cca77d06 Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.5-fdbfb4d27 2025-11-24 09:33:53 +00:00
github-actions[bot]
801a18bea6 Revert "Rebuild"
This reverts commit 9031cd9330.
2025-11-24 09:33:52 +00:00
github-actions[bot]
1c715a714c Revert "Update version and changelog for v3.31.4"
This reverts commit f58938aee2.
2025-11-24 09:33:52 +00:00
Henry Mercer
c3d42c5d08 Merge pull request #3314 from github/backport-v3.31.4-e12f01789
Merge releases/v4 into releases/v3
2025-11-19 10:02:25 +00:00
github-actions[bot]
9031cd9330 Rebuild 2025-11-18 17:06:56 +00:00
github-actions[bot]
f58938aee2 Update version and changelog for v3.31.4 2025-11-18 16:16:32 +00:00
github-actions[bot]
1f1c162805 Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.4-e12f01789 2025-11-18 16:16:30 +00:00
github-actions[bot]
7ab96a0e6f Revert "Rebuild"
This reverts commit e5971bdba6.
2025-11-18 16:16:30 +00:00
github-actions[bot]
e3cb86275a Revert "Update version and changelog for v3.31.3"
This reverts commit c5a9d29dc9.
2025-11-18 16:16:29 +00:00
Michael B. Gale
f94c9befff Merge pull request #3295 from github/backport-v3.31.3-014f16e7a
Merge releases/v4 into releases/v3
2025-11-13 22:45:46 +00:00
github-actions[bot]
e5971bdba6 Rebuild 2025-11-13 22:03:22 +00:00
github-actions[bot]
c5a9d29dc9 Update version and changelog for v3.31.3 2025-11-13 21:57:42 +00:00
github-actions[bot]
9f1109665d Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.3-014f16e7a 2025-11-13 21:57:40 +00:00
github-actions[bot]
f8f60f3a2b Revert "Rebuild"
This reverts commit c6eb09db21.
2025-11-13 21:57:40 +00:00
github-actions[bot]
f4d10b9ef7 Revert "Update version and changelog for v3.31.2"
This reverts commit 09db9044dc.
2025-11-13 21:57:39 +00:00
Henry Mercer
5d5cd550d3 Merge pull request #3263 from github/backport-v3.31.2-0499de31b
Merge releases/v4 into releases/v3
2025-10-30 15:01:09 +00:00
github-actions[bot]
c6eb09db21 Rebuild 2025-10-30 14:37:32 +00:00
github-actions[bot]
09db9044dc Update version and changelog for v3.31.2 2025-10-30 14:34:26 +00:00
github-actions[bot]
d3cd47d8d6 Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.2-0499de31b 2025-10-30 14:34:25 +00:00
github-actions[bot]
8e9caa5100 Revert "Rebuild"
This reverts commit c2805e0a04.
2025-10-30 14:34:25 +00:00
github-actions[bot]
23a6333b88 Revert "Update version and changelog for v3.31.1"
This reverts commit c0d3370b54.
2025-10-30 14:34:24 +00:00
Henry Mercer
c503cb4fbb Merge pull request #3254 from github/backport-v3.31.1-5fe9434cd
Merge releases/v4 into releases/v3
2025-10-30 11:00:54 +00:00
github-actions[bot]
c2805e0a04 Rebuild 2025-10-30 10:35:44 +00:00
github-actions[bot]
c0d3370b54 Update version and changelog for v3.31.1 2025-10-30 10:31:02 +00:00
github-actions[bot]
ddd0dc746a Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.1-5fe9434cd 2025-10-30 10:31:01 +00:00
github-actions[bot]
2f607936ce Revert "Rebuild"
This reverts commit 9e3918e481.
2025-10-30 10:31:00 +00:00
github-actions[bot]
37e7dfbaa0 Revert "Update version and changelog for v3.31.0"
This reverts commit 7dd1575dac.
2025-10-30 10:31:00 +00:00
Michael B. Gale
d198d2fabf Merge pull request #3237 from github/backport-v3.31.0-4e94bd11f
Merge releases/v4 into releases/v3
2025-10-24 19:30:34 +01:00
github-actions[bot]
9e3918e481 Rebuild 2025-10-24 17:18:40 +00:00
github-actions[bot]
7dd1575dac Update version and changelog for v3.31.0 2025-10-24 17:11:07 +00:00
github-actions[bot]
28fc48d83c Merge remote-tracking branch 'origin/releases/v4' into backport-v3.31.0-4e94bd11f 2025-10-24 17:11:06 +00:00
github-actions[bot]
12c6008004 Revert "Rebuild"
This reverts commit 5f3f3164ad.
2025-10-24 17:11:05 +00:00
github-actions[bot]
d3019effb0 Revert "Update version and changelog for v3.30.9"
This reverts commit ba42101490.
2025-10-24 17:11:05 +00:00
Henry Mercer
42213152a8 Merge pull request #3216 from github/backport-v3.30.9-16140ae1a
Merge releases/v4 into releases/v3
2025-10-17 17:09:13 +01:00
Henry Mercer
e677e67801 Run setup-codeql on Node 20 for v3 2025-10-17 16:53:18 +01:00
github-actions[bot]
5f3f3164ad Rebuild 2025-10-17 15:34:40 +00:00
github-actions[bot]
ba42101490 Update version and changelog for v3.30.9 2025-10-17 15:24:34 +00:00
github-actions[bot]
f11af5849b Merge remote-tracking branch 'origin/releases/v4' into backport-v3.30.9-16140ae1a 2025-10-17 15:24:33 +00:00
github-actions[bot]
ba5430dc86 Revert "Rebuild"
This reverts commit 948223fe01.
2025-10-17 15:24:33 +00:00
github-actions[bot]
13e883e119 Revert "Update version and changelog for v3.30.8"
This reverts commit a37add20d4.
2025-10-17 15:24:32 +00:00
Michael B. Gale
755f44910c Merge pull request #3201 from github/backport-v3.30.8-2a6736cca
Merge releases/v4 into releases/v3
2025-10-10 18:20:36 +01:00
github-actions[bot]
948223fe01 Rebuild 2025-10-10 16:56:34 +00:00
github-actions[bot]
a37add20d4 Update version and changelog for v3.30.8 2025-10-10 16:50:13 +00:00
github-actions[bot]
ab163cf08b Merge remote-tracking branch 'origin/releases/v4' into backport-v3.30.8-2a6736cca 2025-10-10 16:50:12 +00:00
github-actions[bot]
319796f085 Revert "Rebuild"
This reverts commit c551c50310.
2025-10-10 16:50:12 +00:00
github-actions[bot]
bd1ac56295 Revert "Update version and changelog for v3.30.7"
This reverts commit b264e15259.
2025-10-10 16:50:12 +00:00
Mario Campos
a8d1ac45b9 Merge pull request #3187 from github/backport-v3.30.7-e296a9355
Merge releases/v4 into releases/v3
2025-10-07 10:58:53 -05:00
github-actions[bot]
c551c50310 Rebuild 2025-10-07 15:33:29 +00:00
Mario Campos
01f1a24033 Downgrade action.yml to use Node.js 20 instead of Node.js 24 for v3 2025-10-07 10:29:22 -05:00
github-actions[bot]
b264e15259 Update version and changelog for v3.30.7 2025-10-07 15:23:05 +00:00
79 changed files with 194573 additions and 277516 deletions

View File

@@ -16,5 +16,5 @@ inputs:
     Comma separated list of query ids that should NOT be included in this SARIF file.
 runs:
-  using: node24
+  using: node20
   main: index.js

View File

@@ -1,6 +0,0 @@
name: Verify that the best-effort debug artifact scan completed
description: Verifies that the best-effort debug artifact scan completed successfully during tests
runs:
using: node24
main: index.js
post: post.js

View File

@@ -1,2 +0,0 @@
// The main step is a no-op, since we can only verify artifact scan completion in the post step.
console.log("Will verify artifact scan completion in the post step.");

View File

@@ -1,11 +0,0 @@
// Post step - runs after the workflow completes, when artifact scan has finished
const process = require("process");
const scanFinished = process.env.CODEQL_ACTION_ARTIFACT_SCAN_FINISHED;
if (scanFinished !== "true") {
console.error("Error: Best-effort artifact scan did not complete. Expected CODEQL_ACTION_ARTIFACT_SCAN_FINISHED=true");
process.exit(1);
}
console.log("✓ Best-effort artifact scan completed successfully");

View File

@@ -34,7 +34,7 @@ Products:
 Environments:
-- **Dotcom** - Impacts CodeQL workflows on `github.com` and/or GitHub Enterprise Cloud with Data Residency.
+- **Dotcom** - Impacts CodeQL workflows on `github.com`.
 - **GHES** - Impacts CodeQL workflows on GitHub Enterprise Server.
 - **Testing/None** - This change does not impact any CodeQL workflows in production.

View File

@@ -79,7 +79,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: ${{ matrix.os }}-zstd-bundle.sarif
           path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -67,7 +67,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -78,7 +78,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -99,7 +99,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -76,7 +76,6 @@ jobs:
       - uses: ./../action/analyze
         env:
           https_proxy: http://squid-proxy:3128
-          CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
           CODEQL_ACTION_TEST_MODE: true
     container:
       image: ubuntu:22.04

View File

@@ -64,7 +64,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -83,7 +83,7 @@ jobs:
           post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -91,14 +91,14 @@ jobs:
           retention-days: 7
       - name: Upload quality SARIF
         if: contains(matrix.analysis-kinds, 'code-quality')
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
       - name: Upload post-processed SARIF
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v5
         with:
           name: |
             post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

View File

@@ -56,7 +56,7 @@ jobs:
           use-all-platform-bundle: 'false'
           setup-kotlin: 'true'
       - name: Set up Ruby
-        uses: ruby/setup-ruby@ac793fdd38cc468a4dd57246fa9d0e868aba9085 # v1.270.0
+        uses: ruby/setup-ruby@8aeb6ff8030dd539317f8e1769a044873b56ea71 # v1.268.0
         with:
           ruby-version: 2.6
       - name: Install Code Scanning integration

View File

@@ -6,11 +6,6 @@ env:
   # Diff informed queries add an additional query filter which is not yet
   # taken into account by these tests.
   CODEQL_ACTION_DIFF_INFORMED_QUERIES: false
-  # Specify overlay enablement manually to ensure stability around the exclude-from-incremental
-  # query filter. Here we only enable for the default code scanning suite.
-  CODEQL_ACTION_OVERLAY_ANALYSIS: true
-  CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
-  CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true
 on:
   push:

View File

@@ -58,8 +58,6 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
          dotnet-version: '9.x'
-      - name: Assert best-effort artifact scan completed
-        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -85,7 +83,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all artifacts
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v6
       - name: Check expected artifacts exist
         run: |
           LANGUAGES="cpp csharp go java javascript python"

View File

@@ -54,8 +54,6 @@ jobs:
         uses: actions/setup-dotnet@v5
         with:
           dotnet-version: '9.x'
-      - name: Assert best-effort artifact scan completed
-        uses: ./../action/.github/actions/verify-debug-artifact-scan-completed
       - uses: ./../action/init
         id: init
         with:
@@ -79,7 +77,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: Download all artifacts
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v6
       - name: Check expected artifacts exist
         run: |
           VERSIONS="stable-v2.20.3 default linked nightly-latest"

View File

@@ -142,7 +142,7 @@ jobs:
           token: "${{ secrets.GITHUB_TOKEN }}"
       - name: Generate token
-        uses: actions/create-github-app-token@v2.2.1
+        uses: actions/create-github-app-token@v2.2.0
         id: app-token
         with:
           app-id: ${{ vars.AUTOMATION_APP_ID }}

View File

@@ -137,7 +137,7 @@ jobs:
       - name: Generate token
         if: github.event_name == 'workflow_dispatch'
-        uses: actions/create-github-app-token@v2.2.1
+        uses: actions/create-github-app-token@v2.2.0
         id: app-token
         with:
           app-id: ${{ vars.AUTOMATION_APP_ID }}

View File

@@ -93,7 +93,7 @@ jobs:
       pull-requests: write # needed to create pull request
     steps:
       - name: Generate token
-        uses: actions/create-github-app-token@v2.2.1
+        uses: actions/create-github-app-token@v2.2.0
         id: app-token
         with:
           app-id: ${{ vars.AUTOMATION_APP_ID }}

View File

@@ -2,64 +2,52 @@
 See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
-## [UNRELEASED]
-No user facing changes.
-## 4.31.9 - 16 Dec 2025
-No user facing changes.
-## 4.31.8 - 11 Dec 2025
-- Update default CodeQL bundle version to 2.23.8. [#3354](https://github.com/github/codeql-action/pull/3354)
-## 4.31.7 - 05 Dec 2025
+## 3.31.7 - 05 Dec 2025
 - Update default CodeQL bundle version to 2.23.7. [#3343](https://github.com/github/codeql-action/pull/3343)
-## 4.31.6 - 01 Dec 2025
+## 3.31.6 - 01 Dec 2025
 No user facing changes.
-## 4.31.5 - 24 Nov 2025
+## 3.31.5 - 24 Nov 2025
 - Update default CodeQL bundle version to 2.23.6. [#3321](https://github.com/github/codeql-action/pull/3321)
-## 4.31.4 - 18 Nov 2025
+## 3.31.4 - 18 Nov 2025
 No user facing changes.
-## 4.31.3 - 13 Nov 2025
+## 3.31.3 - 13 Nov 2025
 - CodeQL Action v3 will be deprecated in December 2026. The Action now logs a warning for customers who are running v3 but could be running v4. For more information, see [Upcoming deprecation of CodeQL Action v3](https://github.blog/changelog/2025-10-28-upcoming-deprecation-of-codeql-action-v3/).
 - Update default CodeQL bundle version to 2.23.5. [#3288](https://github.com/github/codeql-action/pull/3288)
-## 4.31.2 - 30 Oct 2025
+## 3.31.2 - 30 Oct 2025
 No user facing changes.
-## 4.31.1 - 30 Oct 2025
+## 3.31.1 - 30 Oct 2025
 - The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.
-## 4.31.0 - 24 Oct 2025
+## 3.31.0 - 24 Oct 2025
 - Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
 - When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
-## 4.30.9 - 17 Oct 2025
+## 3.30.9 - 17 Oct 2025
 - Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
 - Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)
-## 4.30.8 - 10 Oct 2025
+## 3.30.8 - 10 Oct 2025
 No user facing changes.
-## 4.30.7 - 06 Oct 2025
-- [v4+ only] The CodeQL Action now runs on Node.js v24. [#3169](https://github.com/github/codeql-action/pull/3169)
+## 3.30.7 - 06 Oct 2025
+No user facing changes.
 ## 3.30.6 - 02 Oct 2025
@@ -295,17 +283,13 @@ No user facing changes.
 ## 3.26.12 - 07 Oct 2024
 - _Upcoming breaking change_: Add a deprecation warning for customers using CodeQL version 2.14.5 and earlier. These versions of CodeQL were discontinued on 24 September 2024 alongside GitHub Enterprise Server 3.10, and will be unsupported by CodeQL Action versions 3.27.0 and later and versions 2.27.0 and later. [#2520](https://github.com/github/codeql-action/pull/2520)
   - If you are using one of these versions, please update to CodeQL CLI version 2.14.6 or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version.
   - Alternatively, if you want to continue using a version of the CodeQL CLI between 2.13.5 and 2.14.5, you can replace `github/codeql-action/*@v3` by `github/codeql-action/*@v3.26.11` and `github/codeql-action/*@v2` by `github/codeql-action/*@v2.26.11` in your code scanning workflow to ensure you continue using this version of the CodeQL Action.
 ## 3.26.11 - 03 Oct 2024
 - _Upcoming breaking change_: Add support for using `actions/download-artifact@v4` to programmatically consume CodeQL Action debug artifacts.
   Starting November 30, 2024, GitHub.com customers will [no longer be able to use `actions/download-artifact@v3`](https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/). Therefore, to avoid breakage, customers who programmatically download the CodeQL Action debug artifacts should set the `CODEQL_ACTION_ARTIFACT_V4_UPGRADE` environment variable to `true` and bump `actions/download-artifact@v3` to `actions/download-artifact@v4` in their workflows. The CodeQL Action will enable this behavior by default in early November and workflows that have not yet bumped `actions/download-artifact@v3` to `actions/download-artifact@v4` will begin failing then.
   This change is currently unavailable for GitHub Enterprise Server customers, as `actions/upload-artifact@v4` and `actions/download-artifact@v4` are not yet compatible with GHES.
 - Update default CodeQL bundle version to 2.19.1. [#2519](https://github.com/github/codeql-action/pull/2519)
@@ -428,12 +412,9 @@ No user facing changes.
 ## 3.25.0 - 15 Apr 2024
 - The deprecated feature for extracting dependencies for a Python analysis has been removed. [#2224](https://github.com/github/codeql-action/pull/2224)
   As a result, the following inputs and environment variables are now ignored:
   - The `setup-python-dependencies` input to the `init` Action
   - The `CODEQL_ACTION_DISABLE_PYTHON_DEPENDENCY_INSTALLATION` environment variable
   We recommend removing any references to these from your workflows. For more information, see the release notes for CodeQL Action v3.23.0 and v2.23.0.
 - Automatically overwrite an existing database if found on the filesystem. [#2229](https://github.com/github/codeql-action/pull/2229)
 - Bump the minimum CodeQL bundle version to 2.12.6. [#2232](https://github.com/github/codeql-action/pull/2232)

View File

@@ -94,6 +94,6 @@ outputs:
   sarif-id:
     description: The ID of the uploaded SARIF file.
 runs:
-  using: node24
+  using: node20
   main: "../lib/analyze-action.js"
   post: "../lib/analyze-action-post.js"

View File

@@ -15,5 +15,5 @@ inputs:
     $GITHUB_WORKSPACE as its working directory.
   required: false
 runs:
-  using: node24
+  using: node20
   main: '../lib/autobuild-action.js'

View File

@@ -165,6 +165,6 @@ outputs:
   codeql-version:
     description: The version of the CodeQL binary used for analysis
 runs:
-  using: node24
+  using: node20
   main: '../lib/init-action.js'
   post: '../lib/init-action-post.js'

lib/analyze-action-post.js (generated, 43237 lines changed): file diff suppressed because it is too large

lib/analyze-action.js (generated, 37454 lines changed): file diff suppressed because it is too large

lib/autobuild-action.js (generated, 35952 lines changed): file diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
-  "bundleVersion": "codeql-bundle-v2.23.8",
-  "cliVersion": "2.23.8",
-  "priorBundleVersion": "codeql-bundle-v2.23.7",
-  "priorCliVersion": "2.23.7"
+  "bundleVersion": "codeql-bundle-v2.23.7",
+  "cliVersion": "2.23.7",
+  "priorBundleVersion": "codeql-bundle-v2.23.6",
+  "priorCliVersion": "2.23.6"
 }
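For context, the hunk above is the action's defaults.json, which records the CodeQL bundle the action downloads when a workflow does not pass an explicit `tools` input; the backport moves the v3 branch back to the 2.23.7 bundle. A minimal TypeScript sketch of reading a file with this shape (the field names come from the hunk above; the function and its caller are illustrative assumptions, not the repository's actual code):

// Illustrative sketch only: the interface mirrors the defaults.json shown above;
// loadBundleDefaults and its use are assumptions for this example.
import * as fs from "fs";
import * as path from "path";

interface BundleDefaults {
  bundleVersion: string;      // e.g. "codeql-bundle-v2.23.7"
  cliVersion: string;         // e.g. "2.23.7"
  priorBundleVersion: string; // previous default, kept alongside the current one
  priorCliVersion: string;
}

function loadBundleDefaults(repoRoot: string): BundleDefaults {
  const raw = fs.readFileSync(path.join(repoRoot, "src", "defaults.json"), "utf8");
  return JSON.parse(raw) as BundleDefaults;
}

// Example use: report which bundle would be downloaded by default.
const defaults = loadBundleDefaults(".");
console.log(`Default CodeQL bundle: ${defaults.bundleVersion} (CLI ${defaults.cliVersion})`);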

lib/init-action-post.js (generated, 43389 lines changed): file diff suppressed because it is too large

lib/init-action.js (generated, 37447 lines changed): file diff suppressed because it is too large

File diff suppressed because it is too large

lib/setup-codeql-action.js (generated, 35946 lines changed): file diff suppressed because it is too large

File diff suppressed because it is too large

lib/start-proxy-action.js (generated, 36274 lines changed): file diff suppressed because it is too large

lib/upload-lib.js (generated, 35981 lines changed): file diff suppressed because it is too large

File diff suppressed because it is too large

lib/upload-sarif-action.js (generated, 35968 lines changed): file diff suppressed because it is too large

package-lock.json (generated, 1218 lines changed): file diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "4.31.10",
+  "version": "3.31.7",
   "private": true,
   "description": "CodeQL action",
   "scripts": {
@@ -24,12 +24,12 @@
   },
   "license": "MIT",
   "dependencies": {
-    "@actions/artifact": "^5.0.1",
+    "@actions/artifact": "^4.0.0",
     "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
-    "@actions/cache": "^5.0.1",
-    "@actions/core": "^2.0.1",
-    "@actions/exec": "^2.0.0",
-    "@actions/github": "^6.0.1",
+    "@actions/cache": "^4.1.0",
+    "@actions/core": "^1.11.1",
+    "@actions/exec": "^1.1.1",
+    "@actions/github": "^6.0.0",
     "@actions/glob": "^0.5.0",
     "@actions/http-client": "^3.0.0",
     "@actions/io": "^2.0.0",
@@ -43,7 +43,7 @@
     "js-yaml": "^4.1.1",
     "jsonschema": "1.4.1",
     "long": "^5.3.2",
-    "node-forge": "^1.3.3",
+    "node-forge": "^1.3.2",
     "semver": "^7.7.3",
     "uuid": "^13.0.0"
   },
@@ -51,7 +51,7 @@
     "@ava/typescript": "6.0.0",
     "@eslint/compat": "^2.0.0",
     "@eslint/eslintrc": "^3.3.3",
-    "@eslint/js": "^9.39.2",
+    "@eslint/js": "^9.39.1",
     "@microsoft/eslint-formatter-sarif": "^3.1.0",
     "@octokit/types": "^16.0.0",
     "@types/archiver": "^7.0.0",
@@ -61,16 +61,16 @@
     "@types/node-forge": "^1.3.14",
     "@types/semver": "^7.7.1",
     "@types/sinon": "^21.0.0",
-    "@typescript-eslint/eslint-plugin": "^8.49.0",
+    "@typescript-eslint/eslint-plugin": "^8.48.0",
     "@typescript-eslint/parser": "^8.48.0",
     "ava": "^6.4.1",
-    "esbuild": "^0.27.1",
+    "esbuild": "^0.27.0",
     "eslint": "^8.57.1",
     "eslint-import-resolver-typescript": "^3.8.7",
     "eslint-plugin-filenames": "^1.3.2",
     "eslint-plugin-github": "^5.1.8",
     "eslint-plugin-import": "2.29.1",
-    "eslint-plugin-jsdoc": "^61.5.0",
+    "eslint-plugin-jsdoc": "^61.4.1",
     "eslint-plugin-no-async-foreach": "^0.1.1",
     "glob": "^11.1.0",
     "nock": "^14.0.10",

View File

@@ -27,7 +27,7 @@ steps:
       output: ${{ runner.temp }}/results
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
     with:
       name: ${{ matrix.os }}-zstd-bundle.sarif
       path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -12,7 +12,7 @@ steps:
       output: "${{ runner.temp }}/results"
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
     with:
       name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
       path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -25,7 +25,7 @@ steps:
       output: "${{ runner.temp }}/results"
       upload-database: false
   - name: Upload SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
     with:
       name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
       path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -18,7 +18,7 @@ steps:
    with:
      output: "${{ runner.temp }}/results"
  - name: Upload SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
    with:
      name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -23,7 +23,6 @@ services:
      - 3128:3128
 env:
   https_proxy: http://squid-proxy:3128
-  CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION: true
 steps:
   - uses: ./../action/init
     with:

View File

@@ -11,7 +11,7 @@ steps:
    with:
      output: "${{ runner.temp }}/results"
  - name: Upload SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
    with:
      name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -39,7 +39,7 @@ steps:
      post-processed-sarif-path: "${{ runner.temp }}/post-processed"
  - name: Upload security SARIF
    if: contains(matrix.analysis-kinds, 'code-scanning')
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
    with:
      name: |
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -47,14 +47,14 @@ steps:
      retention-days: 7
  - name: Upload quality SARIF
    if: contains(matrix.analysis-kinds, 'code-quality')
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
    with:
      name: |
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
      path: "${{ runner.temp }}/results/javascript.quality.sarif"
      retention-days: 7
  - name: Upload post-processed SARIF
-    uses: actions/upload-artifact@v6
+    uses: actions/upload-artifact@v5
    with:
      name: |
        post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json

View File

@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
 versions: ["default"]
 steps:
   - name: Set up Ruby
-    uses: ruby/setup-ruby@ac793fdd38cc468a4dd57246fa9d0e868aba9085 # v1.270.0
+    uses: ruby/setup-ruby@8aeb6ff8030dd539317f8e1769a044873b56ea71 # v1.268.0
     with:
       ruby-version: 2.6
   - name: Install Code Scanning integration

View File

@@ -21,5 +21,5 @@ outputs:
   environment:
     description: The inferred build environment configuration.
 runs:
-  using: node24
+  using: node20
   main: '../lib/resolve-environment-action.js'

View File

@@ -35,5 +35,5 @@ outputs:
   codeql-version:
     description: The version of the CodeQL binary that was installed.
 runs:
-  using: node24
+  using: node20
   main: '../lib/setup-codeql-action.js'

View File

@@ -19,18 +19,20 @@ import { getApiDetails, getGitHubVersion } from "./api-client";
 import { runAutobuild } from "./autobuild";
 import { getTotalCacheSize, shouldStoreCache } from "./caching-utils";
 import { getCodeQL } from "./codeql";
-import { Config, getConfig } from "./config-utils";
-import {
-  cleanupAndUploadDatabases,
-  DatabaseUploadResult,
-} from "./database-upload";
+import {
+  Config,
+  getConfig,
+  isCodeQualityEnabled,
+  isCodeScanningEnabled,
+} from "./config-utils";
+import { cleanupAndUploadDatabases } from "./database-upload";
 import {
   DependencyCacheUploadStatusReport,
   uploadDependencyCaches,
 } from "./dependency-caching";
 import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
-import { Features } from "./feature-flags";
+import { Feature, Features } from "./feature-flags";
 import { KnownLanguage } from "./languages";
 import { getActionsLogger, Logger } from "./logging";
 import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay-database-utils";
@@ -57,13 +59,15 @@ interface AnalysisStatusReport
   extends uploadLib.UploadStatusReport,
     QueriesStatusReport {}
+interface DependencyCachingUploadStatusReport {
+  dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
+}
 interface FinishStatusReport
   extends StatusReportBase,
     DatabaseCreationTimings,
-    AnalysisStatusReport {
-  dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
-  database_upload_results: DatabaseUploadResult[];
-}
+    AnalysisStatusReport,
+    DependencyCachingUploadStatusReport {}
 interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
   /** Size of TRAP caches that we uploaded, in bytes. */
@@ -82,7 +86,6 @@ async function sendStatusReport(
   didUploadTrapCaches: boolean,
   trapCacheCleanup: TrapCacheCleanupStatusReport | undefined,
   dependencyCacheResults: DependencyCacheUploadStatusReport | undefined,
-  databaseUploadResults: DatabaseUploadResult[],
   logger: Logger,
 ) {
   const status = getActionsStatus(error, stats?.analyze_failure_language);
@@ -103,7 +106,6 @@
     ...(dbCreationTimings || {}),
     ...(trapCacheCleanup || {}),
     dependency_caching_upload_results: dependencyCacheResults,
-    database_upload_results: databaseUploadResults,
   };
   if (config && didUploadTrapCaches) {
     const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
@@ -221,7 +223,6 @@ async function run() {
   let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
   let didUploadTrapCaches = false;
   let dependencyCacheResults: DependencyCacheUploadStatusReport | undefined;
-  let databaseUploadResults: DatabaseUploadResult[] = [];
   util.initializeEnvironment(actionsUtil.getActionVersion());
   // Make inputs accessible in the `post` step, details at
@@ -357,6 +358,7 @@ async function run() {
     const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
     const category = actionsUtil.getOptionalInput("category");
+    if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
       uploadResults = await postProcessAndUploadSarif(
         logger,
         features,
@@ -366,6 +368,36 @@
         category,
         actionsUtil.getOptionalInput("post-processed-sarif-path"),
       );
+    } else if (uploadKind === "always") {
+      uploadResults = {};
+      if (isCodeScanningEnabled(config)) {
+        uploadResults[analyses.AnalysisKind.CodeScanning] =
+          await uploadLib.uploadFiles(
+            outputDir,
+            checkoutPath,
+            category,
+            features,
+            logger,
+            analyses.CodeScanning,
+          );
+      }
+      if (isCodeQualityEnabled(config)) {
+        uploadResults[analyses.AnalysisKind.CodeQuality] =
+          await uploadLib.uploadFiles(
+            outputDir,
+            checkoutPath,
+            category,
+            features,
+            logger,
+            analyses.CodeQuality,
+          );
+      }
+    } else {
+      uploadResults = {};
+      logger.info("Not uploading results");
+    }
     // Set the SARIF id outputs only if we have results for them, to avoid
     // having keys with empty values in the action output.
@@ -393,7 +425,7 @@
     // Possibly upload the database bundles for remote queries.
     // Note: Take care with the ordering of this call since databases may be cleaned up
     // at the `overlay` or `clear` level.
-    databaseUploadResults = await cleanupAndUploadDatabases(
+    await cleanupAndUploadDatabases(
       repositoryNwo,
       codeql,
       config,
@@ -465,7 +497,6 @@
       didUploadTrapCaches,
       trapCacheCleanupTelemetry,
      dependencyCacheResults,
-      databaseUploadResults,
      logger,
    );
    return;
@@ -488,7 +519,6 @@
      didUploadTrapCaches,
      trapCacheCleanupTelemetry,
      dependencyCacheResults,
-      databaseUploadResults,
      logger,
    );
  } else if (runStats !== undefined) {
@@ -502,7 +532,6 @@
      didUploadTrapCaches,
      trapCacheCleanupTelemetry,
      dependencyCacheResults,
-      databaseUploadResults,
      logger,
    );
  } else {
@@ -516,7 +545,6 @@
      didUploadTrapCaches,
      trapCacheCleanupTelemetry,
      dependencyCacheResults,
-      databaseUploadResults,
      logger,
    );
  }

View File

@@ -95,14 +95,14 @@ test("getGitHubVersion for different domain", async (t) => {
   t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
 });
-test("getGitHubVersion for GHEC-DR", async (t) => {
+test("getGitHubVersion for GHE_DOTCOM", async (t) => {
   mockGetMetaVersionHeader("ghe.com");
   const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
     auth: "",
     url: "https://foo.ghe.com",
     apiURL: undefined,
   });
-  t.deepEqual({ type: util.GitHubVariant.GHEC_DR }, gheDotcom);
+  t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
 });
 test("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {

View File

@@ -125,7 +125,7 @@ export async function getGitHubVersionFromApi(
   }
   if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
-    return { type: GitHubVariant.GHEC_DR };
+    return { type: GitHubVariant.GHE_DOTCOM };
   }
   const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] as string;
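The only change above is the enum member's name (GHEC_DR on the v4 side, GHE_DOTCOM on v3); the detection itself, keyed off a "ghe.com" value in the enterprise version header, is the same on both branches. A self-contained TypeScript sketch of that mapping, with a locally defined enum standing in for the repository's GitHubVariant and an assumed header name (everything other than the "ghe.com" check and the member names is illustrative):

// Self-contained sketch of the header-to-variant mapping shown in the hunk above.
// The enum and the header name below are local stand-ins, not the repository's code.
enum GitHubVariant {
  DOTCOM,
  GHE_DOTCOM, // named GHEC_DR on the v4 branch
  GHES,
}

// Assumed header name, for illustration only.
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";

interface VariantInfo {
  type: GitHubVariant;
  version?: string;
}

function variantFromHeaders(headers: Record<string, string | undefined>): VariantInfo {
  const value = headers[GITHUB_ENTERPRISE_VERSION_HEADER];
  if (value === undefined) {
    // No enterprise version header: treat as github.com (simplified for this sketch).
    return { type: GitHubVariant.DOTCOM };
  }
  if (value === "ghe.com") {
    return { type: GitHubVariant.GHE_DOTCOM };
  }
  return { type: GitHubVariant.GHES, version: value };
}

// Example: a GHES response advertising version 3.14.2 in the header.
console.log(variantFromHeaders({ [GITHUB_ENTERPRISE_VERSION_HEADER]: "3.14.2" }));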

View File

@@ -1,98 +0,0 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import test from "ava";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";
import { getRecordingLogger, LoggedMessage } from "./testing-utils";
test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file with a fake GitHub token
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file with token ghp_1234567890123456789012345678901234AB",
);
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
t.regex(
error?.message || "",
/Found 1 potential GitHub token.*Personal Access Token/,
);
t.regex(error?.message || "", /test\.txt/);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
test("scanArtifactsForTokens handles files without tokens", async (t) => {
const logger = getRunnerLogger(true);
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
try {
// Create a test file without tokens
const testFile = path.join(tempDir, "test.txt");
fs.writeFileSync(
testFile,
"This is a test file without any sensitive data",
);
await t.notThrowsAsync(
async () => await scanArtifactsForTokens([testFile], logger),
);
} finally {
// Clean up
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
if (os.platform() !== "win32") {
test("scanArtifactsForTokens finds token in debug artifacts", async (t) => {
t.timeout(15000); // 15 seconds
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages, { logToConsole: false });
// The zip here is a regression test based on
// https://github.com/github/codeql-action/security/advisories/GHSA-vqf5-2xx6-9wfm
const testZip = path.join(
__dirname,
"..",
"src",
"testdata",
"debug-artifacts-with-fake-token.zip",
);
// This zip file contains a nested structure with a fake token in:
// my-db-java-partial.zip/trap/java/invocations/kotlin.9017231652989744319.trap
const error = await t.throwsAsync(
async () => await scanArtifactsForTokens([testZip], logger),
);
t.regex(
error?.message || "",
/Found.*potential GitHub token/,
"Should detect token in nested zip",
);
t.regex(
error?.message || "",
/kotlin\.9017231652989744319\.trap/,
"Should report the .trap file containing the token",
);
const logOutput = messages.map((msg) => msg.message).join("\n");
t.regex(
logOutput,
/^Extracting gz file: .*\.gz$/m,
"Logs should show that .gz files were extracted",
);
});
}

View File

@@ -1,379 +0,0 @@
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as exec from "@actions/exec";
import { Logger } from "./logging";
import { getErrorMessage } from "./util";
/**
* GitHub token patterns to scan for.
* These patterns match various GitHub token formats.
*/
const GITHUB_TOKEN_PATTERNS = [
{
name: "Personal Access Token",
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
},
{
name: "OAuth Access Token",
pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
},
{
name: "User-to-Server Token",
pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Server-to-Server Token",
pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
},
{
name: "Refresh Token",
pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
},
{
name: "App Installation Access Token",
pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
},
];
interface TokenFinding {
tokenType: string;
filePath: string;
}
interface ScanResult {
scannedFiles: number;
findings: TokenFinding[];
}
/**
* Scans a file for GitHub tokens.
*
* @param filePath Path to the file to scan
* @param relativePath Relative path for display purposes
* @param logger Logger instance
* @returns Array of token findings in the file
*/
function scanFileForTokens(
filePath: string,
relativePath: string,
logger: Logger,
): TokenFinding[] {
const findings: TokenFinding[] = [];
try {
const content = fs.readFileSync(filePath, "utf8");
for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
const matches = content.match(pattern);
if (matches) {
for (let i = 0; i < matches.length; i++) {
findings.push({ tokenType: name, filePath: relativePath });
}
logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
}
}
return findings;
} catch (e) {
// If we can't read the file as text, it's likely binary or inaccessible
logger.debug(
`Could not scan file ${filePath} for tokens: ${getErrorMessage(e)}`,
);
return [];
}
}
/**
* Recursively extracts and scans archive files (.zip, .gz, .tar.gz).
*
* @param archivePath Path to the archive file
* @param relativeArchivePath Relative path of the archive for display
* @param extractDir Directory to extract to
* @param logger Logger instance
* @param depth Current recursion depth (to prevent infinite loops)
* @returns Scan results
*/
async function scanArchiveFile(
archivePath: string,
relativeArchivePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const MAX_DEPTH = 10; // Prevent infinite recursion
if (depth > MAX_DEPTH) {
throw new Error(
`Maximum archive extraction depth (${MAX_DEPTH}) reached for ${archivePath}`,
);
}
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
try {
const tempExtractDir = fs.mkdtempSync(
path.join(extractDir, `extract-${depth}-`),
);
// Determine archive type and extract accordingly
const fileName = path.basename(archivePath).toLowerCase();
if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
// Extract tar.gz files
logger.debug(`Extracting tar.gz file: ${archivePath}`);
await exec.exec("tar", ["-xzf", archivePath, "-C", tempExtractDir], {
silent: true,
});
} else if (fileName.endsWith(".tar.zst")) {
// Extract tar.zst files
logger.debug(`Extracting tar.zst file: ${archivePath}`);
await exec.exec(
"tar",
["--zstd", "-xf", archivePath, "-C", tempExtractDir],
{
silent: true,
},
);
} else if (fileName.endsWith(".zst")) {
// Extract .zst files (single file compression)
logger.debug(`Extracting zst file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".zst"),
);
await exec.exec("zstd", ["-d", archivePath, "-o", outputFile], {
silent: true,
});
} else if (fileName.endsWith(".gz")) {
// Extract .gz files (single file compression)
logger.debug(`Extracting gz file: ${archivePath}`);
const outputFile = path.join(
tempExtractDir,
path.basename(archivePath, ".gz"),
);
await exec.exec("gunzip", ["-c", archivePath], {
outStream: fs.createWriteStream(outputFile),
silent: true,
});
} else if (fileName.endsWith(".zip")) {
// Extract zip files
logger.debug(`Extracting zip file: ${archivePath}`);
await exec.exec(
"unzip",
["-q", "-o", archivePath, "-d", tempExtractDir],
{
silent: true,
},
);
}
// Scan the extracted contents
const scanResult = await scanDirectory(
tempExtractDir,
relativeArchivePath,
logger,
depth + 1,
);
result.scannedFiles += scanResult.scannedFiles;
result.findings.push(...scanResult.findings);
// Clean up extracted files
fs.rmSync(tempExtractDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not extract or scan archive file ${archivePath}: ${getErrorMessage(e)}`,
);
}
return result;
}
/**
* Scans a single file, including recursive archive extraction if applicable.
*
* @param fullPath Full path to the file
* @param relativePath Relative path for display
* @param extractDir Directory to use for extraction (for archive files)
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanFile(
fullPath: string,
relativePath: string,
extractDir: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 1,
findings: [],
};
// Check if it's an archive file and recursively scan it
const fileName = path.basename(fullPath).toLowerCase();
const isArchive =
fileName.endsWith(".zip") ||
fileName.endsWith(".tar.gz") ||
fileName.endsWith(".tgz") ||
fileName.endsWith(".tar.zst") ||
fileName.endsWith(".zst") ||
fileName.endsWith(".gz");
if (isArchive) {
const archiveResult = await scanArchiveFile(
fullPath,
relativePath,
extractDir,
logger,
depth,
);
result.scannedFiles += archiveResult.scannedFiles;
result.findings.push(...archiveResult.findings);
}
// Scan the file itself for tokens (unless it's a pure binary archive format)
const fileFindings = scanFileForTokens(fullPath, relativePath, logger);
result.findings.push(...fileFindings);
return result;
}
/**
* Recursively scans a directory for GitHub tokens.
*
* @param dirPath Directory path to scan
* @param baseRelativePath Base relative path for computing display paths
* @param logger Logger instance
* @param depth Current recursion depth
* @returns Scan results
*/
async function scanDirectory(
dirPath: string,
baseRelativePath: string,
logger: Logger,
depth: number = 0,
): Promise<ScanResult> {
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
const entries = fs.readdirSync(dirPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.join(baseRelativePath, entry.name);
if (entry.isDirectory()) {
const subResult = await scanDirectory(
fullPath,
relativePath,
logger,
depth,
);
result.scannedFiles += subResult.scannedFiles;
result.findings.push(...subResult.findings);
} else if (entry.isFile()) {
const fileResult = await scanFile(
fullPath,
relativePath,
path.dirname(fullPath),
logger,
depth,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
return result;
}
/**
* Scans a list of files and directories for GitHub tokens.
* Recursively extracts and scans archive files (.zip, .gz, .tgz, .tar.gz, .zst, .tar.zst).
*
* @param filesToScan List of file paths to scan
* @param logger Logger instance
* @throws if any potential GitHub tokens are found in the scanned files
*/
export async function scanArtifactsForTokens(
filesToScan: string[],
logger: Logger,
): Promise<void> {
logger.info(
"Starting best-effort check for potential GitHub tokens in debug artifacts (for testing purposes only)...",
);
const result: ScanResult = {
scannedFiles: 0,
findings: [],
};
// Create a temporary directory for extraction
const tempScanDir = fs.mkdtempSync(path.join(os.tmpdir(), "artifact-scan-"));
try {
for (const filePath of filesToScan) {
const stats = fs.statSync(filePath);
const fileName = path.basename(filePath);
if (stats.isDirectory()) {
const dirResult = await scanDirectory(filePath, fileName, logger);
result.scannedFiles += dirResult.scannedFiles;
result.findings.push(...dirResult.findings);
} else if (stats.isFile()) {
const fileResult = await scanFile(
filePath,
fileName,
tempScanDir,
logger,
);
result.scannedFiles += fileResult.scannedFiles;
result.findings.push(...fileResult.findings);
}
}
// Compute statistics from findings
const tokenTypesCounts = new Map<string, number>();
const filesWithTokens = new Set<string>();
for (const finding of result.findings) {
tokenTypesCounts.set(
finding.tokenType,
(tokenTypesCounts.get(finding.tokenType) || 0) + 1,
);
filesWithTokens.add(finding.filePath);
}
const tokenTypesSummary = Array.from(tokenTypesCounts.entries())
.map(([type, count]) => `${count} ${type}${count > 1 ? "s" : ""}`)
.join(", ");
const baseSummary = `scanned ${result.scannedFiles} files, found ${result.findings.length} potential token(s) in ${filesWithTokens.size} file(s)`;
const summaryWithTypes = tokenTypesSummary
? `${baseSummary} (${tokenTypesSummary})`
: baseSummary;
logger.info(`Artifact check complete: ${summaryWithTypes}`);
if (result.findings.length > 0) {
const fileList = Array.from(filesWithTokens).join(", ");
throw new Error(
`Found ${result.findings.length} potential GitHub token(s) (${tokenTypesSummary}) in debug artifacts at: ${fileList}. This is a best-effort check for testing purposes only.`,
);
}
} finally {
// Clean up temporary directory
try {
fs.rmSync(tempScanDir, { recursive: true, force: true });
} catch (e) {
logger.debug(
`Could not clean up temporary scan directory: ${getErrorMessage(e)}`,
);
}
}
}
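For reference, here is a minimal sketch of how this scanner could be driven from a test-mode code path. The wrapper function and its name are illustrative assumptions, not part of the Action; the call shape follows the `scanArtifactsForTokens` signature above, which throws when it finds anything that looks like a token.

// Illustrative only: checkDebugArtifacts is a hypothetical wrapper, not Action code.
// scanArtifactsForTokens throws if it finds potential GitHub tokens, so callers
// treat an exception as a failed check.
async function checkDebugArtifacts(paths: string[], logger: Logger): Promise<void> {
  try {
    await scanArtifactsForTokens(paths, logger);
    logger.info("No potential GitHub tokens found in debug artifacts.");
  } catch (e) {
    logger.error(`Artifact token check failed: ${getErrorMessage(e)}`);
    throw e;
  }
}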

View File

@@ -206,7 +206,6 @@ export interface CodeQL {
 * Run 'codeql resolve queries --format=startingpacks'.
 */
 resolveQueriesStartingPacks(queries: string[]): Promise<string[]>;
-resolveDatabase(databasePath: string): Promise<ResolveDatabaseOutput>;
 /**
 * Run 'codeql github merge-results'.
 */
@@ -231,10 +230,6 @@ export interface VersionInfo {
 overlayVersion?: number;
 }
-export interface ResolveDatabaseOutput {
-overlayBaseSpecifier?: string;
-}
 export interface ResolveLanguagesOutput {
 [language: string]: [string];
 }
@@ -498,7 +493,6 @@ export function createStubCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
 partialCodeql,
 "resolveQueriesStartingPacks",
 ),
-resolveDatabase: resolveFunction(partialCodeql, "resolveDatabase"),
 mergeResults: resolveFunction(partialCodeql, "mergeResults"),
 };
 }
@@ -1009,26 +1003,6 @@ async function getCodeQLForCmd(
 );
 }
 },
-async resolveDatabase(
-databasePath: string,
-): Promise<ResolveDatabaseOutput> {
-const codeqlArgs = [
-"resolve",
-"database",
-databasePath,
-"--format=json",
-...getExtraOptionsFromEnv(["resolve", "database"]),
-];
-const output = await runCli(cmd, codeqlArgs, { noStreamStdout: true });
-try {
-return JSON.parse(output) as ResolveDatabaseOutput;
-} catch (e) {
-throw new Error(
-`Unexpected output from codeql resolve database --format=json: ${e}`,
-);
-}
-},
 async mergeResults(
 sarifFiles: string[],
 outputFile: string,

View File

@@ -15,7 +15,6 @@ import * as configUtils from "./config-utils";
 import * as errorMessages from "./error-messages";
 import { Feature } from "./feature-flags";
 import * as gitUtils from "./git-utils";
-import { GitVersionInfo } from "./git-utils";
 import { KnownLanguage, Language } from "./languages";
 import { getRunnerLogger } from "./logging";
 import {
@@ -979,7 +978,6 @@ interface OverlayDatabaseModeTestSetup {
 languages: Language[];
 codeqlVersion: string;
 gitRoot: string | undefined;
-gitVersion: GitVersionInfo | undefined;
 codeScanningConfig: configUtils.UserConfig;
 diskUsage: DiskUsage | undefined;
 memoryFlagValue: number;
@@ -994,10 +992,6 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
 languages: [KnownLanguage.javascript],
 codeqlVersion: CODEQL_OVERLAY_MINIMUM_VERSION,
 gitRoot: "/some/git/root",
-gitVersion: new GitVersionInfo(
-gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
-gitUtils.GIT_MINIMUM_VERSION_FOR_OVERLAY,
-),
 codeScanningConfig: {},
 diskUsage: {
 numAvailableBytes: 50_000_000_000,
@@ -1076,7 +1070,6 @@ const getOverlayDatabaseModeMacro = test.macro({
 setup.buildMode,
 undefined,
 setup.codeScanningConfig,
-setup.gitVersion,
 logger,
 );
@@ -1780,32 +1773,6 @@ test(
 },
 );
-test(
-getOverlayDatabaseModeMacro,
-"Fallback due to old git version",
-{
-overlayDatabaseEnvVar: "overlay",
-gitVersion: new GitVersionInfo("2.30.0", "2.30.0"), // Version below required 2.38.0
-},
-{
-overlayDatabaseMode: OverlayDatabaseMode.None,
-useOverlayDatabaseCaching: false,
-},
-);
-test(
-getOverlayDatabaseModeMacro,
-"Fallback when git version cannot be determined",
-{
-overlayDatabaseEnvVar: "overlay",
-gitVersion: undefined,
-},
-{
-overlayDatabaseMode: OverlayDatabaseMode.None,
-useOverlayDatabaseCaching: false,
-},
-);
 // Exercise language-specific overlay analysis features code paths
 for (const language in KnownLanguage) {
 test(

View File

@@ -22,19 +22,11 @@ import {
 parseUserConfig,
 UserConfig,
 } from "./config/db-config";
-import { addDiagnostic, makeTelemetryDiagnostic } from "./diagnostics";
 import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
-import { EnvVar } from "./environment";
 import * as errorMessages from "./error-messages";
 import { Feature, FeatureEnablement } from "./feature-flags";
 import { RepositoryProperties } from "./feature-flags/properties";
-import {
-getGitRoot,
-getGitVersionOrThrow,
-GIT_MINIMUM_VERSION_FOR_OVERLAY,
-GitVersionInfo,
-isAnalyzingDefaultBranch,
-} from "./git-utils";
+import { getGitRoot, isAnalyzingDefaultBranch } from "./git-utils";
 import { KnownLanguage, Language } from "./languages";
 import { Logger } from "./logging";
 import {
@@ -53,8 +45,6 @@ import {
 isDefined,
 checkDiskUsage,
 getCodeQLMemoryLimit,
-getErrorMessage,
-isInTestMode,
 } from "./util";
 export * from "./config/db-config";
@@ -719,7 +709,6 @@ export async function getOverlayDatabaseMode(
 buildMode: BuildMode | undefined,
 ramInput: string | undefined,
 codeScanningConfig: UserConfig,
-gitVersion: GitVersionInfo | undefined,
 logger: Logger,
 ): Promise<{
 overlayDatabaseMode: OverlayDatabaseMode;
@@ -822,22 +811,6 @@ export async function getOverlayDatabaseMode(
 );
 return nonOverlayAnalysis;
 }
-if (gitVersion === undefined) {
-logger.warning(
-`Cannot build an ${overlayDatabaseMode} database because ` +
-"the Git version could not be determined. " +
-"Falling back to creating a normal full database instead.",
-);
-return nonOverlayAnalysis;
-}
-if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
-logger.warning(
-`Cannot build an ${overlayDatabaseMode} database because ` +
-`the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. ` +
-"Falling back to creating a normal full database instead.",
-);
-return nonOverlayAnalysis;
-}
 return {
 overlayDatabaseMode,
@@ -930,24 +903,6 @@ export async function initConfig(
 config.computedConfig["query-filters"] = [];
 }
-let gitVersion: GitVersionInfo | undefined = undefined;
-try {
-gitVersion = await getGitVersionOrThrow();
-logger.info(`Using Git version ${gitVersion.fullVersion}`);
-await logGitVersionTelemetry(config, gitVersion);
-} catch (e) {
-logger.warning(`Could not determine Git version: ${getErrorMessage(e)}`);
-// Throw the error in test mode so it's more visible, unless the environment
-// variable is set to tolerate this, for example because we're running in a
-// Docker container where git may not be available.
-if (
-isInTestMode() &&
-process.env[EnvVar.TOLERATE_MISSING_GIT_VERSION] !== "true"
-) {
-throw e;
-}
-}
 // The choice of overlay database mode depends on the selection of languages
 // and queries, which in turn depends on the user config and the augmentation
 // properties. So we need to calculate the overlay database mode after the
@@ -961,7 +916,6 @@ export async function initConfig(
 config.buildMode,
 inputs.ramInput,
 config.computedConfig,
-gitVersion,
 logger,
 );
 logger.info(
@@ -1362,26 +1316,3 @@ export function getPrimaryAnalysisConfig(config: Config): AnalysisConfig {
 ? CodeScanning
 : CodeQuality;
 }
-/** Logs the Git version as a telemetry diagnostic. */
-async function logGitVersionTelemetry(
-config: Config,
-gitVersion: GitVersionInfo,
-): Promise<void> {
-if (config.languages.length > 0) {
-addDiagnostic(
-config,
-// Arbitrarily choose the first language. We could also choose all languages, but that
-// increases the risk of misinterpreting the data.
-config.languages[0],
-makeTelemetryDiagnostic(
-"codeql-action/git-version-telemetry",
-"Git version telemetry",
-{
-fullVersion: gitVersion.fullVersion,
-truncatedVersion: gitVersion.truncatedVersion,
-},
-),
-);
-}
-}

View File

@@ -231,7 +231,7 @@ test("Don't crash if uploading a database fails", async (t) => {
 (v) =>
 v.type === "warning" &&
 v.message ===
-"Failed to upload database for javascript: some error message",
+"Failed to upload database for javascript: Error: some error message",
 ) !== undefined,
 );
 });

View File

@@ -13,20 +13,6 @@ import { RepositoryNwo } from "./repository";
 import * as util from "./util";
 import { bundleDb, CleanupLevel, parseGitHubUrl } from "./util";
-/** Information about a database upload. */
-export interface DatabaseUploadResult {
-/** Language of the database. */
-language: string;
-/** Size of the zipped database in bytes. */
-zipped_upload_size_bytes?: number;
-/** Whether the uploaded database is an overlay base. */
-is_overlay_base?: boolean;
-/** Time taken to upload database in milliseconds. */
-upload_duration_ms?: number;
-/** If there was an error during database upload, this is its message. */
-error?: string;
-}
 export async function cleanupAndUploadDatabases(
 repositoryNwo: RepositoryNwo,
 codeql: CodeQL,
@@ -34,44 +20,42 @@ export async function cleanupAndUploadDatabases(
 apiDetails: GitHubApiDetails,
 features: FeatureEnablement,
 logger: Logger,
-): Promise<DatabaseUploadResult[]> {
+): Promise<void> {
 if (actionsUtil.getRequiredInput("upload-database") !== "true") {
 logger.debug("Database upload disabled in workflow. Skipping upload.");
-return [];
+return;
 }
 if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
 logger.debug(
 `Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
 );
-return [];
+return;
 }
 if (util.isInTestMode()) {
 logger.debug("In test mode. Skipping database upload.");
-return [];
+return;
 }
 // Do nothing when not running against github.com
 if (
 config.gitHubVersion.type !== util.GitHubVariant.DOTCOM &&
-config.gitHubVersion.type !== util.GitHubVariant.GHEC_DR
+config.gitHubVersion.type !== util.GitHubVariant.GHE_DOTCOM
 ) {
 logger.debug("Not running against github.com or GHEC-DR. Skipping upload.");
-return [];
+return;
 }
 if (!(await gitUtils.isAnalyzingDefaultBranch())) {
 // We only want to upload a database if we are analyzing the default branch.
 logger.debug("Not analyzing default branch. Skipping upload.");
-return [];
+return;
 }
-// If config.overlayDatabaseMode is OverlayBase, then we have overlay base databases for all languages.
-const shouldUploadOverlayBase =
-config.overlayDatabaseMode === OverlayDatabaseMode.OverlayBase &&
-(await features.getValue(Feature.UploadOverlayDbToApi));
-const cleanupLevel = shouldUploadOverlayBase
-? CleanupLevel.Overlay
-: CleanupLevel.Clear;
+const cleanupLevel =
+config.overlayDatabaseMode === OverlayDatabaseMode.OverlayBase &&
+(await features.getValue(Feature.UploadOverlayDbToApi))
+? CleanupLevel.Overlay
+: CleanupLevel.Clear;
@@ -93,22 +77,19 @@ export async function cleanupAndUploadDatabases(
 uploadsBaseUrl = uploadsBaseUrl.slice(0, -1);
 }
-const reports: DatabaseUploadResult[] = [];
 for (const language of config.languages) {
-let bundledDbSize: number | undefined = undefined;
 try {
 // Upload the database bundle.
 // Although we are uploading arbitrary file contents to the API, it's worth
 // noting that it's the API's job to validate that the contents is acceptable.
 // This API method is available to anyone with write access to the repo.
 const bundledDb = await bundleDb(config, language, codeql, language);
-bundledDbSize = fs.statSync(bundledDb).size;
+const bundledDbSize = fs.statSync(bundledDb).size;
 const bundledDbReadStream = fs.createReadStream(bundledDb);
 const commitOid = await gitUtils.getCommitOid(
 actionsUtil.getRequiredInput("checkout_path"),
 );
 try {
-const startTime = performance.now();
 await client.request(
 `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`,
 {
@@ -126,30 +107,13 @@ export async function cleanupAndUploadDatabases(
 },
 },
 );
-const endTime = performance.now();
-reports.push({
-language,
-zipped_upload_size_bytes: bundledDbSize,
-is_overlay_base: shouldUploadOverlayBase,
-upload_duration_ms: endTime - startTime,
-});
 logger.debug(`Successfully uploaded database for ${language}`);
 } finally {
 bundledDbReadStream.close();
 }
 } catch (e) {
 // Log a warning but don't fail the workflow
-logger.warning(
-`Failed to upload database for ${language}: ${util.getErrorMessage(e)}`,
-);
-reports.push({
-language,
-error: util.getErrorMessage(e),
-...(bundledDbSize !== undefined
-? { zipped_upload_size_bytes: bundledDbSize }
-: {}),
-});
+logger.warning(`Failed to upload database for ${language}: ${e}`);
 }
 }
-return reports;
 }

View File

@@ -8,7 +8,6 @@ import archiver from "archiver";
 import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
 import { dbIsFinalized } from "./analyze";
-import { scanArtifactsForTokens } from "./artifact-scanner";
 import { type CodeQL } from "./codeql";
 import { Config } from "./config-utils";
 import { EnvVar } from "./environment";
@@ -24,7 +23,6 @@ import {
 getCodeQLDatabasePath,
 getErrorMessage,
 GitHubVariant,
-isInTestMode,
 listFolder,
 } from "./util";
@@ -271,14 +269,6 @@ export async function uploadDebugArtifacts(
 return "upload-not-supported";
 }
-// When running in test mode, perform a best effort scan of the debug artifacts. The artifact
-// scanner is basic and not reliable or fast enough for production use, but it can help catch
-// some issues early.
-if (isInTestMode()) {
-await scanArtifactsForTokens(toUpload, logger);
-core.exportVariable("CODEQL_ACTION_ARTIFACT_SCAN_FINISHED", "true");
-}
 let suffix = "";
 const matrix = getOptionalInput("matrix");
 if (matrix) {

View File

@@ -1,6 +1,6 @@
 {
-"bundleVersion": "codeql-bundle-v2.23.8",
-"cliVersion": "2.23.8",
-"priorBundleVersion": "codeql-bundle-v2.23.7",
-"priorCliVersion": "2.23.7"
+"bundleVersion": "codeql-bundle-v2.23.7",
+"cliVersion": "2.23.7",
+"priorBundleVersion": "codeql-bundle-v2.23.6",
+"priorCliVersion": "2.23.6"
 }

View File

@@ -603,6 +603,28 @@ test("getFeaturePrefix - returns empty string if no features are enabled", async
 }
 });
+test("getFeaturePrefix - Java - returns 'minify-' if JavaMinimizeDependencyJars is enabled", async (t) => {
+const codeql = createStubCodeQL({});
+const features = createFeatures([Feature.JavaMinimizeDependencyJars]);
+const result = await getFeaturePrefix(codeql, features, KnownLanguage.java);
+t.deepEqual(result, "minify-");
+});
+test("getFeaturePrefix - non-Java - returns '' if JavaMinimizeDependencyJars is enabled", async (t) => {
+const codeql = createStubCodeQL({});
+const features = createFeatures([Feature.JavaMinimizeDependencyJars]);
+for (const knownLanguage of Object.values(KnownLanguage)) {
+// Skip Java since we expect a result for it, which is tested in the previous test.
+if (knownLanguage === KnownLanguage.java) {
+continue;
+}
+const result = await getFeaturePrefix(codeql, features, knownLanguage);
+t.deepEqual(result, "", `Expected no feature prefix for ${knownLanguage}`);
+}
+});
 test("getFeaturePrefix - C# - returns prefix if CsharpNewCacheKey is enabled", async (t) => {
 const codeql = createStubCodeQL({});
 const features = createFeatures([Feature.CsharpNewCacheKey]);

View File

@@ -541,7 +541,18 @@ export async function getFeaturePrefix(
 }
 };
-if (language === KnownLanguage.csharp) {
+if (language === KnownLanguage.java) {
+// To ensure a safe rollout of JAR minimization, we change the key when the feature is enabled.
+const minimizeJavaJars = await features.getValue(
+Feature.JavaMinimizeDependencyJars,
+codeql,
+);
+// To maintain backwards compatibility with this, we return "minify-" instead of a hash.
+if (minimizeJavaJars) {
+return "minify-";
+}
+} else if (language === KnownLanguage.csharp) {
 await addFeatureIfEnabled(Feature.CsharpNewCacheKey);
 await addFeatureIfEnabled(Feature.CsharpCacheBuildModeNone);
 }
@@ -582,8 +593,14 @@ async function cachePrefix(
 // experimental features that affect the cache contents.
 const featurePrefix = await getFeaturePrefix(codeql, features, language);
-// Assemble the cache key.
+// Assemble the cache key. For backwards compatibility with the JAR minification experiment's existing
+// feature prefix usage, we add that feature prefix at the start. Other feature prefixes are inserted
+// after the general CodeQL dependency cache prefix.
+if (featurePrefix === "minify-") {
+return `${featurePrefix}${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
+} else {
 return `${prefix}-${featurePrefix}${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
+}
 }
 /** Represents information about our overall cache usage for CodeQL dependency caches. */
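For illustration, the two key orderings in the hunk above produce cache keys shaped as follows. This is a sketch only: the values of `prefix` and `CODEQL_DEPENDENCY_CACHE_VERSION` are placeholder assumptions, since the real constants are not shown in this hunk.

// Illustrative sketch: "codeql-dependencies" and the version number are placeholders,
// not the Action's real constants.
const prefix = "codeql-dependencies";
const CODEQL_DEPENDENCY_CACHE_VERSION = 1;
const runnerOs = "Linux";
const language = "java";

function assembleKey(featurePrefix: string): string {
  // Mirrors the ordering rule above: the JAR-minification prefix goes first for
  // backwards compatibility; other feature prefixes follow the general prefix.
  return featurePrefix === "minify-"
    ? `${featurePrefix}${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`
    : `${prefix}-${featurePrefix}${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
}

// assembleKey("minify-") -> "minify-codeql-dependencies-1-Linux-java-"
// assembleKey("")        -> "codeql-dependencies-1-Linux-java-"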

View File

@@ -185,27 +185,3 @@ export function flushDiagnostics(config: Config) {
 // Reset the unwritten diagnostics array.
 unwrittenDiagnostics = [];
 }
-/**
- * Creates a telemetry-only diagnostic message. This is a convenience function
- * for creating diagnostics that should only be sent to telemetry and not
- * displayed on the status page or CLI summary table.
- *
- * @param id An identifier under which it makes sense to group this diagnostic message
- * @param name Display name
- * @param attributes Structured metadata
- */
-export function makeTelemetryDiagnostic(
-id: string,
-name: string,
-attributes: { [key: string]: any },
-): DiagnosticMessage {
-return makeDiagnostic(id, name, {
-attributes,
-visibility: {
-cliSummaryTable: false,
-statusPage: false,
-telemetry: true,
-},
-});
-}

View File

@@ -129,10 +129,4 @@ export enum EnvVar {
 * the workflow is valid and validation is not necessary.
 */
 SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
-/**
- * Whether to tolerate failure to determine the git version (only applicable in test mode).
- * Intended for use in environments where git may not be installed, such as Docker containers.
- */
-TOLERATE_MISSING_GIT_VERSION = "CODEQL_ACTION_TOLERATE_MISSING_GIT_VERSION",
 }

View File

@@ -62,13 +62,13 @@ test(`All features are disabled if running against GHES`, async (t) => {
 });
 });
-test(`Feature flags are requested in GHEC-DR`, async (t) => {
+test(`Feature flags are requested in Proxima`, async (t) => {
 await withTmpDir(async (tmpDir) => {
 const loggedMessages = [];
 const features = setUpFeatureFlagTests(
 tmpDir,
 getRecordingLogger(loggedMessages),
-{ type: GitHubVariant.GHEC_DR },
+{ type: GitHubVariant.GHE_DOTCOM },
 );
 mockFeatureFlagApiEndpoint(200, initializeFeatures(true));
@@ -436,48 +436,65 @@ test(`selects CLI from defaults.json on GHES`, async (t) => {
 });
 });
-for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) {
-test(`selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`, async (t) => {
+test("selects CLI v2.20.1 on Dotcom when feature flags enable v2.20.0 and v2.20.1", async (t) => {
 await withTmpDir(async (tmpDir) => {
 const features = setUpFeatureFlagTests(tmpDir);
 const expectedFeatureEnablement = initializeFeatures(true);
 expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
 expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
-expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] =
-false;
-expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] =
-false;
-expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] =
-false;
-expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] =
-false;
+expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] = false;
+expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] = false;
+expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] = false;
+expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] = false;
 mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
-const defaultCliVersion = await features.getDefaultCliVersion(variant);
+const defaultCliVersion = await features.getDefaultCliVersion(
+GitHubVariant.DOTCOM,
+);
 t.deepEqual(defaultCliVersion, {
 cliVersion: "2.20.1",
 tagName: "codeql-bundle-v2.20.1",
 toolsFeatureFlagsValid: true,
 });
 });
 });
-test(`selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`, async (t) => {
+test("includes tag name", async (t) => {
+await withTmpDir(async (tmpDir) => {
+const features = setUpFeatureFlagTests(tmpDir);
+const expectedFeatureEnablement = initializeFeatures(true);
+expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
+mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
+const defaultCliVersion = await features.getDefaultCliVersion(
+GitHubVariant.DOTCOM,
+);
+t.deepEqual(defaultCliVersion, {
+cliVersion: "2.20.0",
+tagName: "codeql-bundle-v2.20.0",
+toolsFeatureFlagsValid: true,
+});
+});
+});
+test(`selects CLI from defaults.json on Dotcom when no default version feature flags are enabled`, async (t) => {
 await withTmpDir(async (tmpDir) => {
 const features = setUpFeatureFlagTests(tmpDir);
 const expectedFeatureEnablement = initializeFeatures(true);
 mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
-const defaultCliVersion = await features.getDefaultCliVersion(variant);
+const defaultCliVersion = await features.getDefaultCliVersion(
+GitHubVariant.DOTCOM,
+);
 t.deepEqual(defaultCliVersion, {
 cliVersion: defaults.cliVersion,
 tagName: defaults.bundleVersion,
 toolsFeatureFlagsValid: false,
 });
 });
 });
-test(`ignores invalid version numbers in default version feature flags on ${variant}`, async (t) => {
+test("ignores invalid version numbers in default version feature flags", async (t) => {
 await withTmpDir(async (tmpDir) => {
 const loggedMessages = [];
 const features = setUpFeatureFlagTests(
@@ -491,7 +508,9 @@ for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) {
 true;
 mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
-const defaultCliVersion = await features.getDefaultCliVersion(variant);
+const defaultCliVersion = await features.getDefaultCliVersion(
+GitHubVariant.DOTCOM,
+);
 t.deepEqual(defaultCliVersion, {
 cliVersion: "2.20.1",
 tagName: "codeql-bundle-v2.20.1",
@@ -507,8 +526,7 @@ for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) {
 ) !== undefined,
 );
 });
 });
-}
 test("legacy feature flags should end with _enabled", async (t) => {
 for (const [feature, config] of Object.entries(featureConfig)) {

View File

@@ -44,6 +44,7 @@ export interface FeatureEnablement {
 */
 export enum Feature {
 AllowToolcacheInput = "allow_toolcache_input",
+AnalyzeUseNewUpload = "analyze_use_new_upload",
 CleanupTrapCaches = "cleanup_trap_caches",
 CppDependencyInstallation = "cpp_dependency_installation_enabled",
 CsharpCacheBuildModeNone = "csharp_cache_bmn",
@@ -53,6 +54,7 @@ export enum Feature {
 DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",
 DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
 ExportDiagnosticsEnabled = "export_diagnostics_enabled",
+JavaMinimizeDependencyJars = "java_minimize_dependency_jars",
 OverlayAnalysis = "overlay_analysis",
 OverlayAnalysisActions = "overlay_analysis_actions",
 OverlayAnalysisCodeScanningActions = "overlay_analysis_code_scanning_actions",
@@ -118,6 +120,11 @@ export const featureConfig: Record<
 envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
 minimumVersion: undefined,
 },
+[Feature.AnalyzeUseNewUpload]: {
+defaultValue: false,
+envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
+minimumVersion: undefined,
+},
 [Feature.CleanupTrapCaches]: {
 defaultValue: false,
 envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -167,6 +174,11 @@ export const featureConfig: Record<
 legacyApi: true,
 minimumVersion: undefined,
 },
+[Feature.JavaMinimizeDependencyJars]: {
+defaultValue: false,
+envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
+minimumVersion: "2.23.0",
+},
 [Feature.OverlayAnalysis]: {
 defaultValue: false,
 envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",
@@ -293,7 +305,6 @@ export const featureConfig: Record<
 defaultValue: false,
 envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API",
 minimumVersion: undefined,
-toolsFeature: ToolsFeature.BundleSupportsOverlay,
 },
 [Feature.UseRepositoryProperties]: {
 defaultValue: false,
@@ -487,8 +498,8 @@ class GitHubFeatureFlags {
 async getDefaultCliVersion(
 variant: util.GitHubVariant,
 ): Promise<CodeQLDefaultVersionInfo> {
-if (supportsFeatureFlags(variant)) {
-return await this.getDefaultCliVersionFromFlags();
+if (variant === util.GitHubVariant.DOTCOM) {
+return await this.getDefaultDotcomCliVersion();
 }
 return {
 cliVersion: defaults.cliVersion,
@@ -496,7 +507,7 @@ class GitHubFeatureFlags {
 };
 }
-async getDefaultCliVersionFromFlags(): Promise<CodeQLDefaultVersionInfo> {
+async getDefaultDotcomCliVersion(): Promise<CodeQLDefaultVersionInfo> {
 const response = await this.getAllFeatures();
 const enabledFeatureFlagCliVersions = Object.entries(response)
@@ -622,7 +633,10 @@ class GitHubFeatureFlags {
 private async loadApiResponse(): Promise<GitHubFeatureFlagsApiResponse> {
 // Do nothing when not running against github.com
-if (!supportsFeatureFlags(this.gitHubVersion.type)) {
+if (
+this.gitHubVersion.type !== util.GitHubVariant.DOTCOM &&
+this.gitHubVersion.type !== util.GitHubVariant.GHE_DOTCOM
+) {
 this.logger.debug(
 "Not running against github.com. Disabling all toggleable features.",
 );
@@ -688,10 +702,3 @@ class GitHubFeatureFlags {
 }
 }
 }
-function supportsFeatureFlags(githubVariant: util.GitHubVariant): boolean {
-return (
-githubVariant === util.GitHubVariant.DOTCOM ||
-githubVariant === util.GitHubVariant.GHEC_DR
-);
-}

View File

@@ -1,5 +1,4 @@
 import * as fs from "fs";
-import * as os from "os";
 import * as path from "path";
 import * as core from "@actions/core";
@@ -316,6 +315,7 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => {
 "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
 );
+try {
 const result = await gitUtils.getFileOidsUnderPath("/fake/path");
 t.deepEqual(result, {
@@ -329,10 +329,13 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => {
 ["ls-files", "--recurse-submodules", "--format=%(objectname)_%(path)"],
 "Cannot list Git OIDs of tracked files.",
 ]);
+} finally {
+runGitCommandStub.restore();
+}
 });
 test("getFileOidsUnderPath handles quoted paths", async (t) => {
-sinon
+const runGitCommandStub = sinon
 .stub(gitUtils as any, "runGitCommand")
 .resolves(
 "30d998ded095371488be3a729eb61d86ed721a18_lib/normal-file.js\n" +
@@ -340,6 +343,7 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => {
 'a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_"lib/file\\twith\\ttabs.js"',
 );
+try {
 const result = await gitUtils.getFileOidsUnderPath("/fake/path");
 t.deepEqual(result, {
@@ -347,17 +351,26 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => {
 "lib/file with spaces.js": "d89514599a9a99f22b4085766d40af7b99974827",
 "lib/file\twith\ttabs.js": "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96",
 });
+} finally {
+runGitCommandStub.restore();
+}
 });
 test("getFileOidsUnderPath handles empty output", async (t) => {
-sinon.stub(gitUtils as any, "runGitCommand").resolves("");
+const runGitCommandStub = sinon
+.stub(gitUtils as any, "runGitCommand")
+.resolves("");
+try {
 const result = await gitUtils.getFileOidsUnderPath("/fake/path");
 t.deepEqual(result, {});
+} finally {
+runGitCommandStub.restore();
+}
 });
 test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
-sinon
+const runGitCommandStub = sinon
 .stub(gitUtils as any, "runGitCommand")
 .resolves(
 "30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
@@ -365,6 +378,7 @@ test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
 "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
 );
+try {
 await t.throwsAsync(
 async () => {
 await gitUtils.getFileOidsUnderPath("/fake/path");
@@ -374,64 +388,7 @@ test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
 message: 'Unexpected "git ls-files" output: invalid-line-format',
 },
 );
-});
-test("getGitVersionOrThrow returns version for valid git output", async (t) => {
-sinon
-.stub(gitUtils as any, "runGitCommand")
-.resolves(`git version 2.40.0${os.EOL}`);
-const version = await gitUtils.getGitVersionOrThrow();
-t.is(version.truncatedVersion, "2.40.0");
-t.is(version.fullVersion, "2.40.0");
-});
-test("getGitVersionOrThrow throws for invalid git output", async (t) => {
-sinon.stub(gitUtils as any, "runGitCommand").resolves("invalid output");
-await t.throwsAsync(
-async () => {
-await gitUtils.getGitVersionOrThrow();
-},
-{
-instanceOf: Error,
-message: "Could not parse Git version from output: invalid output",
-},
-);
-});
-test("getGitVersionOrThrow handles Windows-style git output", async (t) => {
-sinon
-.stub(gitUtils as any, "runGitCommand")
-.resolves("git version 2.40.0.windows.1");
-const version = await gitUtils.getGitVersionOrThrow();
-// The truncated version should contain just the major.minor.patch portion
-t.is(version.truncatedVersion, "2.40.0");
-t.is(version.fullVersion, "2.40.0.windows.1");
-});
-test("getGitVersionOrThrow throws when git command fails", async (t) => {
-sinon
-.stub(gitUtils as any, "runGitCommand")
-.rejects(new Error("git not found"));
-await t.throwsAsync(
-async () => {
-await gitUtils.getGitVersionOrThrow();
-},
-{
-instanceOf: Error,
-message: "git not found",
-},
-);
-});
-test("GitVersionInfo.isAtLeast correctly compares versions", async (t) => {
-const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0");
-t.true(version.isAtLeast("2.38.0"));
-t.true(version.isAtLeast("2.40.0"));
-t.false(version.isAtLeast("2.41.0"));
-t.false(version.isAtLeast("3.0.0"));
+} finally {
+runGitCommandStub.restore();
+}
 });

View File

@@ -1,7 +1,6 @@
 import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as io from "@actions/io";
-import * as semver from "semver";
 import {
 getOptionalInput,
@@ -10,52 +9,6 @@ import {
 } from "./actions-util";
 import { ConfigurationError, getRequiredEnvParam } from "./util";
-/**
- * Minimum Git version required for overlay analysis. The `git ls-files --format`
- * option, which is used by `getFileOidsUnderPath`, was introduced in Git 2.38.0.
- */
-export const GIT_MINIMUM_VERSION_FOR_OVERLAY = "2.38.0";
-/**
- * Git version information
- *
- * The full version string as reported by `git --version` may not be
- * semver-compatible (e.g., "2.40.0.windows.1"). This class captures both
- * the full version string and a truncated semver-compatible version string
- * (e.g., "2.40.0").
- */
-export class GitVersionInfo {
-constructor(
-/** Truncated semver-compatible version */
-public truncatedVersion: string,
-/** Full version string as reported by `git --version` */
-public fullVersion: string,
-) {}
-isAtLeast(minVersion: string): boolean {
-return semver.gte(this.truncatedVersion, minVersion);
-}
-}
-/**
- * Gets the version of Git installed on the system and throws an error if
- * the version cannot be determined.
- */
-export async function getGitVersionOrThrow(): Promise<GitVersionInfo> {
-const stdout = await runGitCommand(
-undefined,
-["--version"],
-"Failed to get git version.",
-);
-// Git version output can vary: "git version 2.40.0" or "git version 2.40.0.windows.1"
-// We capture just the major.minor.patch portion to ensure semver compatibility.
-const match = stdout.trim().match(/^git version ((\d+\.\d+\.\d+).*)$/);
-if (match?.[1] && match?.[2]) {
-return new GitVersionInfo(match[2], match[1]);
-}
-throw new Error(`Could not parse Git version from output: ${stdout.trim()}`);
-}
 export const runGitCommand = async function (
 workingDirectory: string | undefined,
 args: string[],

View File

@@ -33,7 +33,6 @@ import {
 flushDiagnostics,
 logUnwrittenDiagnostics,
 makeDiagnostic,
-makeTelemetryDiagnostic,
 } from "./diagnostics";
 import { EnvVar } from "./environment";
 import { Feature, Features } from "./feature-flags";
@@ -89,13 +88,6 @@ import {
 } from "./util";
 import { checkWorkflow } from "./workflow";
-/**
- * First version of CodeQL where the Java extractor safely supports the option to minimize
- * dependency jars. Note: some earlier versions of the extractor will respond to the corresponding
- * option, but may rewrite jars in ways that lead to extraction errors.
- */
-export const CODEQL_VERSION_JAR_MINIMIZATION = "2.23.0";
 /**
 * Sends a status report indicating that the `init` Action is starting.
 *
@@ -426,10 +418,17 @@ async function run() {
 // Arbitrarily choose the first language. We could also choose all languages, but that
 // increases the risk of misinterpreting the data.
 config.languages[0],
-makeTelemetryDiagnostic(
+makeDiagnostic(
 "codeql-action/bundle-download-telemetry",
 "CodeQL bundle download telemetry",
-toolsDownloadStatusReport,
+{
+attributes: toolsDownloadStatusReport,
+visibility: {
+cliSummaryTable: false,
+statusPage: false,
+telemetry: true,
+},
+},
 ),
 );
 }
@@ -639,20 +638,18 @@ async function run() {
 }
 }
-// If we are doing a Java `build-mode: none` analysis, then set the environment variable that
-// enables the option in the Java extractor to minimize dependency jars. We also only do this if
-// dependency caching is enabled, since the option is intended to reduce the size of dependency
-// caches, but the jar-rewriting does have a performance cost that we'd like to avoid when
-// caching is not being used.
-// TODO: Remove this language-specific mechanism and replace it with a more general one that
-// tells extractors when dependency caching is enabled, and then the Java extractor can make its
-// own decision about whether to rewrite jars.
+// If the feature flag to minimize Java dependency jars is enabled, and we are doing a Java
+// `build-mode: none` analysis (i.e. the flag is relevant), then set the environment variable
+// that enables the corresponding option in the Java extractor. We also only do this if
+// dependency caching is enabled, since the option is intended to reduce the size of
+// dependency caches, but the jar-rewriting does have a performance cost that we'd like to avoid
+// when caching is not being used.
 if (process.env[EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS]) {
 logger.debug(
 `${EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS} is already set to '${process.env[EnvVar.JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS]}', so the Action will not override it.`,
 );
 } else if (
-(await codeQlVersionAtLeast(codeql, CODEQL_VERSION_JAR_MINIMIZATION)) &&
+(await features.getValue(Feature.JavaMinimizeDependencyJars, codeql)) &&
 config.dependencyCachingEnabled &&
 config.buildMode === BuildMode.None &&
 config.languages.includes(KnownLanguage.java)
@@ -788,10 +785,17 @@ async function recordZstdAvailability(
 // Arbitrarily choose the first language. We could also choose all languages, but that
 // increases the risk of misinterpreting the data.
 config.languages[0],
-makeTelemetryDiagnostic(
+makeDiagnostic(
 "codeql-action/zstd-availability",
 "Zstandard availability",
-zstdAvailability,
+{
+attributes: zstdAvailability,
+visibility: {
+cliSummaryTable: false,
+statusPage: false,
+telemetry: true,
+},
+},
 ),
 );
 }

View File

@@ -7,9 +7,7 @@ import * as sinon from "sinon";
 import * as actionsUtil from "./actions-util";
 import * as apiClient from "./api-client";
-import { ResolveDatabaseOutput } from "./codeql";
 import * as gitUtils from "./git-utils";
-import { KnownLanguage } from "./languages";
 import { getRunnerLogger } from "./logging";
 import {
 downloadOverlayBaseDatabaseFromCache,
@@ -97,7 +95,6 @@ interface DownloadOverlayBaseDatabaseTestCase {
 hasBaseDatabaseOidsFile: boolean;
 tryGetFolderBytesSucceeds: boolean;
 codeQLVersion: string;
-resolveDatabaseOutput: ResolveDatabaseOutput | Error;
 }
 const defaultDownloadTestCase: DownloadOverlayBaseDatabaseTestCase = {
@@ -108,7 +105,6 @@ const defaultDownloadTestCase: DownloadOverlayBaseDatabaseTestCase = {
 hasBaseDatabaseOidsFile: true,
 tryGetFolderBytesSucceeds: true,
 codeQLVersion: "2.20.5",
-resolveDatabaseOutput: { overlayBaseSpecifier: "20250626:XXX" },
 };
 const testDownloadOverlayBaseDatabaseFromCache = test.macro({
@@ -123,11 +119,9 @@ const testDownloadOverlayBaseDatabaseFromCache = test.macro({
 await fs.promises.mkdir(dbLocation, { recursive: true });
 const logger = getRunnerLogger(true);
+const config = createTestConfig({ dbLocation });
 const testCase = { ...defaultDownloadTestCase, ...partialTestCase };
-const config = createTestConfig({
-dbLocation,
-languages: [KnownLanguage.java],
-});
 config.overlayDatabaseMode = testCase.overlayDatabaseMode;
 config.useOverlayDatabaseCaching = testCase.useOverlayDatabaseCaching;
@@ -169,23 +163,9 @@ const testDownloadOverlayBaseDatabaseFromCache = test.macro({
 .resolves(testCase.tryGetFolderBytesSucceeds ? 1024 * 1024 : undefined);
 stubs.push(tryGetFolderBytesStub);
-const codeql = mockCodeQLVersion(testCase.codeQLVersion);
-if (testCase.resolveDatabaseOutput instanceof Error) {
-const resolveDatabaseStub = sinon
-.stub(codeql, "resolveDatabase")
-.rejects(testCase.resolveDatabaseOutput);
-stubs.push(resolveDatabaseStub);
-} else {
-const resolveDatabaseStub = sinon
-.stub(codeql, "resolveDatabase")
-.resolves(testCase.resolveDatabaseOutput);
-stubs.push(resolveDatabaseStub);
-}
 try {
 const result = await downloadOverlayBaseDatabaseFromCache(
-codeql,
+mockCodeQLVersion(testCase.codeQLVersion),
 config,
 logger,
 );
@@ -275,24 +255,6 @@ test(
 false,
 );
-test(
-testDownloadOverlayBaseDatabaseFromCache,
-"returns undefined when downloaded database doesn't have an overlayBaseSpecifier",
-{
-resolveDatabaseOutput: {},
-},
-false,
-);
-test(
-testDownloadOverlayBaseDatabaseFromCache,
-"returns undefined when resolving database metadata fails",
-{
-resolveDatabaseOutput: new Error("Failed to resolve database metadata"),
-},
-false,
-);
 test(
 testDownloadOverlayBaseDatabaseFromCache,
 "returns undefined when filesystem error occurs",

View File

@@ -17,7 +17,6 @@ import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
 import { Logger, withGroupAsync } from "./logging";
 import {
 CleanupLevel,
-getCodeQLDatabasePath,
 getErrorMessage,
 isInTestMode,
 tryGetFolderBytes,
@@ -177,12 +176,11 @@ const MAX_CACHE_OPERATION_MS = 600_000;
 * @param warningPrefix Prefix for the check failure warning message
 * @returns True if the verification succeeded, false otherwise
 */
-async function checkOverlayBaseDatabase(
-codeql: CodeQL,
+function checkOverlayBaseDatabase(
 config: Config,
 logger: Logger,
 warningPrefix: string,
-): Promise<boolean> {
+): boolean {
 // An overlay-base database should contain the base database OIDs file.
 const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
 if (!fs.existsSync(baseDatabaseOidsFilePath)) {
@@ -191,29 +189,6 @@ async function checkOverlayBaseDatabase(
 );
 return false;
 }
-for (const language of config.languages) {
-const dbPath = getCodeQLDatabasePath(config, language);
-try {
-const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
-if (
-resolveDatabaseOutput === undefined ||
-!("overlayBaseSpecifier" in resolveDatabaseOutput)
-) {
-logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
-return false;
-} else {
-logger.debug(
-`Overlay base specifier for ${language} overlay-base database found: ` +
-`${resolveDatabaseOutput.overlayBaseSpecifier}`,
-);
-}
-} catch (e) {
-logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
-return false;
-}
-}
 return true;
 }
@@ -257,8 +232,7 @@ export async function cleanupAndUploadOverlayBaseDatabaseToCache(
 return false;
 }
-const databaseIsValid = await checkOverlayBaseDatabase(
-codeql,
+const databaseIsValid = checkOverlayBaseDatabase(
 config,
 logger,
 "Abort uploading overlay-base database to cache",
@@ -441,8 +415,7 @@ export async function downloadOverlayBaseDatabaseFromCache(
 return undefined;
 }
-const databaseIsValid = await checkOverlayBaseDatabase(
-codeql,
+const databaseIsValid = checkOverlayBaseDatabase(
 config,
 logger,
 "Downloaded overlay-base database is invalid",

View File

@@ -511,7 +511,7 @@ export async function getCodeQLSource(
 // different version to save download time if the version hasn't been
 // specified explicitly (in which case we always honor it).
 if (
-variant === util.GitHubVariant.GHES &&
+variant !== util.GitHubVariant.DOTCOM &&
 !forceShippedTools &&
 !toolsInput
 ) {

Binary file not shown.

View File

@@ -152,38 +152,27 @@ export interface LoggedMessage {
   message: string | Error;
 }
-export function getRecordingLogger(
-  messages: LoggedMessage[],
-  { logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
-): Logger {
+export function getRecordingLogger(messages: LoggedMessage[]): Logger {
   return {
     debug: (message: string) => {
       messages.push({ type: "debug", message });
-      if (logToConsole) {
-        // eslint-disable-next-line no-console
-        console.debug(message);
-      }
+      // eslint-disable-next-line no-console
+      console.debug(message);
     },
     info: (message: string) => {
       messages.push({ type: "info", message });
-      if (logToConsole) {
-        // eslint-disable-next-line no-console
-        console.info(message);
-      }
+      // eslint-disable-next-line no-console
+      console.info(message);
     },
     warning: (message: string | Error) => {
       messages.push({ type: "warning", message });
-      if (logToConsole) {
-        // eslint-disable-next-line no-console
-        console.warn(message);
-      }
+      // eslint-disable-next-line no-console
+      console.warn(message);
     },
     error: (message: string | Error) => {
      messages.push({ type: "error", message });
-      if (logToConsole) {
-        // eslint-disable-next-line no-console
-        console.error(message);
-      }
+      // eslint-disable-next-line no-console
+      console.error(message);
     },
     isDebug: () => true,
     startGroup: () => undefined,
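With the logToConsole escape hatch removed, every recorded message is now also echoed to the console unconditionally. A sketch of the typical test-side usage under the new signature shown above (the import path is a guess, not taken from this diff):

// Hypothetical usage; "./testing-utils" is an assumed module path.
import { getRecordingLogger, LoggedMessage } from "./testing-utils";

const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);

// Each call is recorded in the array and printed to the console.
logger.info("analysis started");
logger.warning(new Error("something looked off"));

// Tests can then assert on the recorded entries.
console.log(messages.map((m) => m.type)); // ["info", "warning"]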


@@ -4,7 +4,6 @@ import type { VersionInfo } from "./codeql";
 export enum ToolsFeature {
   BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
-  BundleSupportsOverlay = "bundleSupportsOverlay",
   DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
   ForceOverwrite = "forceOverwrite",
   IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",


@@ -433,8 +433,8 @@ function formatGitHubVersion(version: util.GitHubVersion): string {
   switch (version.type) {
     case util.GitHubVariant.DOTCOM:
       return "dotcom";
-    case util.GitHubVariant.GHEC_DR:
-      return "GHEC-DR";
+    case util.GitHubVariant.GHE_DOTCOM:
+      return "GHE dotcom";
     case util.GitHubVariant.GHES:
       return `GHES ${version.version}`;
     default:
@@ -445,12 +445,12 @@ function formatGitHubVersion(version: util.GitHubVersion): string {
 const CHECK_ACTION_VERSION_TESTS: Array<[string, util.GitHubVersion, boolean]> =
   [
     ["2.2.1", { type: util.GitHubVariant.DOTCOM }, true],
-    ["2.2.1", { type: util.GitHubVariant.GHEC_DR }, true],
+    ["2.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, true],
     ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.10" }, false],
     ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.11" }, false],
     ["2.2.1", { type: util.GitHubVariant.GHES, version: "3.12" }, false],
     ["3.2.1", { type: util.GitHubVariant.DOTCOM }, true],
-    ["3.2.1", { type: util.GitHubVariant.GHEC_DR }, true],
+    ["3.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, true],
     ["3.2.1", { type: util.GitHubVariant.GHES, version: "3.10" }, false],
     ["3.2.1", { type: util.GitHubVariant.GHES, version: "3.11" }, false],
     ["3.2.1", { type: util.GitHubVariant.GHES, version: "3.12" }, false],
@@ -458,7 +458,7 @@ const CHECK_ACTION_VERSION_TESTS: Array<[string, util.GitHubVersion, boolean]> =
     ["3.2.1", { type: util.GitHubVariant.GHES, version: "3.20" }, true],
     ["3.2.1", { type: util.GitHubVariant.GHES, version: "3.21" }, true],
     ["4.2.1", { type: util.GitHubVariant.DOTCOM }, false],
-    ["4.2.1", { type: util.GitHubVariant.GHEC_DR }, false],
+    ["4.2.1", { type: util.GitHubVariant.GHE_DOTCOM }, false],
     ["4.2.1", { type: util.GitHubVariant.GHES, version: "3.19" }, false],
     ["4.2.1", { type: util.GitHubVariant.GHES, version: "3.20" }, false],
     ["4.2.1", { type: util.GitHubVariant.GHES, version: "3.21" }, false],


@@ -556,17 +556,13 @@ const CODEQL_ACTION_WARNED_ABOUT_VERSION_ENV_VAR =
 let hasBeenWarnedAboutVersion = false;
 export enum GitHubVariant {
-  /** [GitHub.com](https://github.com) */
-  DOTCOM = "GitHub.com",
-  /** [GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@latest/admin/overview/about-github-enterprise-server) */
-  GHES = "GitHub Enterprise Server",
-  /** [GitHub Enterprise Cloud with data residency](https://docs.github.com/en/enterprise-cloud@latest/admin/data-residency/about-github-enterprise-cloud-with-data-residency) */
-  GHEC_DR = "GitHub Enterprise Cloud with data residency",
+  DOTCOM,
+  GHES,
+  GHE_DOTCOM,
 }
 export type GitHubVersion =
   | { type: GitHubVariant.DOTCOM }
-  | { type: GitHubVariant.GHEC_DR }
+  | { type: GitHubVariant.GHE_DOTCOM }
   | { type: GitHubVariant.GHES; version: string };
 export function checkGitHubVersionInRange(
@@ -1109,7 +1105,7 @@ export function checkActionVersion(
   // and should update to CodeQL Action v4.
   if (
     githubVersion.type === GitHubVariant.DOTCOM ||
-    githubVersion.type === GitHubVariant.GHEC_DR ||
+    githubVersion.type === GitHubVariant.GHE_DOTCOM ||
     (githubVersion.type === GitHubVariant.GHES &&
       semver.satisfies(
         semver.coerce(githubVersion.version) ?? "0.0.0",
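On the right-hand side of these hunks, GitHubVariant reverts to a plain numeric enum and GHEC_DR is replaced by GHE_DOTCOM throughout. A minimal sketch of how code narrows the resulting GitHubVersion union (the enum and type mirror the hunks above; describe is illustrative and not the action's formatGitHubVersion):

// Illustrative mirror of the shapes shown above; not the action's code.
enum GitHubVariant {
  DOTCOM,
  GHES,
  GHE_DOTCOM,
}

type GitHubVersion =
  | { type: GitHubVariant.DOTCOM }
  | { type: GitHubVariant.GHE_DOTCOM }
  | { type: GitHubVariant.GHES; version: string };

// Only the GHES member carries a version string, so narrowing on `type`
// is what makes `version` accessible.
function describe(v: GitHubVersion): string {
  switch (v.type) {
    case GitHubVariant.DOTCOM:
      return "dotcom";
    case GitHubVariant.GHE_DOTCOM:
      return "GHE dotcom";
    case GitHubVariant.GHES:
      return `GHES ${v.version}`;
  }
}

console.log(describe({ type: GitHubVariant.GHES, version: "3.20" })); // "GHES 3.20"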


@@ -29,6 +29,6 @@ outputs:
   proxy_urls:
     description: A stringified JSON array of objects containing the types and URLs of the configured registries.
 runs:
-  using: node24
+  using: node20
   main: "../lib/start-proxy-action.js"
   post: "../lib/start-proxy-action-post.js"


@@ -41,6 +41,6 @@ outputs:
     { "code-scanning": "some-id", "code-quality": "some-other-id" }
 runs:
-  using: node24
+  using: node20
   main: '../lib/upload-sarif-action.js'
   post: '../lib/upload-sarif-action-post.js'