Compare commits

..

99 Commits

Author SHA1 Message Date
Henry Mercer
4d8b358273 Disable SIP disablement check 2025-10-28 12:27:34 +00:00
Henry Mercer
f336c09493 Add testing trigger 2025-10-28 12:26:18 +00:00
Henry Mercer
239e305d18 Check disk usage using Node.js API
This was introduced in Node.js 18
2025-10-27 18:34:23 +00:00
Kasper Svendsen
8d77149e0c Merge pull request #3238 from github/kaspersv/extract-diff-range-computation
Move diff-range computation into utils
2025-10-27 15:40:12 +01:00
Kasper Svendsen
cc17bed958 Move diff-range computation tests 2025-10-27 09:46:16 +01:00
Kasper Svendsen
91ec0ed58f Move diff-range computation into utils for reuse 2025-10-27 09:43:11 +01:00
Kasper Svendsen
4e0b2cd814 Merge pull request #3232 from github/kaspersv/unique-overlay-base-keys
Ensure uniqueness of overlay-base database cache keys
2025-10-27 08:36:12 +01:00
Michael B. Gale
ae78991f55 Merge pull request #3236 from github/mergeback/v4.31.0-to-main-4e94bd11
Mergeback v4.31.0 refs/heads/releases/v4 into main
2025-10-24 18:30:37 +01:00
github-actions[bot]
dd565f3332 Rebuild 2025-10-24 17:11:09 +00:00
github-actions[bot]
fa46f22b12 Update changelog and version after v4.31.0 2025-10-24 17:08:58 +00:00
Michael B. Gale
4e94bd11f7 Merge pull request #3235 from github/update-v4.31.0-1d36546c1
Merge main into releases/v4
2025-10-24 18:08:08 +01:00
github-actions[bot]
8f11182164 Update changelog for v4.31.0 2025-10-24 16:33:59 +00:00
Michael B. Gale
1d36546c14 Merge pull request #3234 from github/mbg/changelog/post-processing
Add changelog entry for post-processing change
2025-10-24 17:26:22 +01:00
Michael B. Gale
08ada26e6a Add changelog entry for post-processing change 2025-10-24 17:07:13 +01:00
Michael B. Gale
b843cbeed0 Merge pull request #3233 from github/mbg/getOptionalEnvVar
Add `getOptionalEnvVar` helper
2025-10-24 16:55:48 +01:00
Michael B. Gale
1ecd563919 Use getOptionalEnvVar in writePostProcessedFiles 2025-10-24 16:18:09 +01:00
Henry Mercer
e576807920 Merge pull request #3223 from github/henrymercer/bump-minimum
Bump minimum CodeQL Bundle version to 2.17.6
2025-10-24 15:11:27 +01:00
Michael B. Gale
ad35676669 Add getOptionalEnvVar function
Also add tests for it and `getRequiredEnvParam`
2025-10-24 15:00:42 +01:00
Michael B. Gale
d75645b13f Merge pull request #3222 from github/mbg/upload-lib/post-process
Perform SARIF post-processing independently of upload
2025-10-24 14:59:04 +01:00
Kasper Svendsen
66759e57b2 Improve error handling for overlay-base cache key creation 2025-10-24 15:49:26 +02:00
Kasper Svendsen
cbcae45fff Reorder components of overlay-base cache key postfix 2025-10-24 15:46:17 +02:00
Michael B. Gale
710606cc35 Check that outputPath is non-empty 2025-10-24 14:42:36 +01:00
Michael B. Gale
f0452d5366 Consistently use "post-processing" 2025-10-24 10:20:25 +01:00
Kasper Svendsen
956c56734d Merge pull request #3231 from github/kaspersv/lower-overlay-base-size-limit
Overlay: Lower size limit for overlay base databases
2025-10-24 11:12:25 +02:00
Kasper Svendsen
b4ce335286 Ensure uniqueness of overlay-base database cache keys 2025-10-24 11:11:57 +02:00
Michael B. Gale
b9cd36824e Merge remote-tracking branch 'origin/main' into mbg/upload-lib/post-process 2025-10-24 10:08:38 +01:00
Chuan-kai Lin
c4b73722ba Add overlay-base database cache key tests 2025-10-24 10:47:17 +02:00
Kasper Svendsen
22d29ca74d Overlay: Lower size limit for overlay base databases 2025-10-24 08:06:42 +02:00
Michael B. Gale
9625890712 Merge pull request #3227 from github/mbg/permission-warning
Update wording in some log messages
2025-10-23 16:30:13 +01:00
Michael B. Gale
690d276755 Merge branch 'main' into mbg/permission-warning 2025-10-23 15:50:48 +01:00
Michael B. Gale
1c3c8066c3 Merge pull request #3228 from github/mbg/test/timeout
Bump timeout for `analyze-action-env` test
2025-10-23 15:49:27 +01:00
Michael B. Gale
da64a41e37 Bump timeout for analyze-action-input test 2025-10-23 15:23:21 +01:00
Michael B. Gale
8376af204a Bump timeout for analyze-action-env test 2025-10-23 13:39:38 +01:00
Michael B. Gale
f48b54af10 Fix fallback not being guarded by uploadKind check 2025-10-23 13:34:03 +01:00
Michael B. Gale
40b4cdd21f Update status report messages 2025-10-23 13:12:19 +01:00
Michael B. Gale
e849c567ec Update debug message 2025-10-23 13:04:06 +01:00
Michael B. Gale
d1b51f05c9 Update API permissions warning 2025-10-23 13:02:31 +01:00
Michael B. Gale
aed27f7231 Fix linter issue 2025-10-22 19:25:34 +01:00
Michael B. Gale
8ff870a6c2 Rename new input to processed-sarif-path 2025-10-22 19:12:57 +01:00
Michael B. Gale
6f0fcbeea7 Rename uploadSarif 2025-10-22 19:09:39 +01:00
Michael B. Gale
89d3359017 Improve test name 2025-10-22 19:05:05 +01:00
Michael B. Gale
d79c0a1339 Fix incomplete comment 2025-10-22 19:03:23 +01:00
Michael B. Gale
5e37670026 Use post-process-output in PR check 2025-10-22 19:01:42 +01:00
Michael B. Gale
def04c1c0e Add test for uploadSarif with output directory 2025-10-22 19:01:42 +01:00
Michael B. Gale
12f3cfef09 Write processed SARIF files if post-process-output input is provided 2025-10-22 19:01:40 +01:00
Michael B. Gale
c2bec36917 Add post-process-output input to analyze action 2025-10-22 19:00:33 +01:00
Michael B. Gale
14139c9f77 Add test for uploadSarif with upload: never 2025-10-22 19:00:33 +01:00
Michael B. Gale
596de7f1bc Move UploadKind check into uploadSarif 2025-10-22 19:00:29 +01:00
Michael B. Gale
899bf2fd1e Use postProcessSarifFiles and uploadProcessedFiles in uploadSarif 2025-10-22 18:48:24 +01:00
Michael B. Gale
6fbdd5f4e9 Split SARIF uploading steps from uploadSpecifiedFiles into a function 2025-10-22 18:48:03 +01:00
Michael B. Gale
489ed914f1 Split SARIF post-processing steps from uploadSpecifiedFiles into a function 2025-10-22 18:48:00 +01:00
Michael B. Gale
42642085de Merge pull request #3206 from github/mbg/analyze/use-upload-sarif
Use `uploadSarif` rather than `uploadFiles` in `analyze` action
2025-10-22 17:45:25 +01:00
Henry Mercer
4bd7dfe989 Merge pull request #3226 from github/henrymercer/prefer-optional-chaining
Linting: Prefer optional chaining
2025-10-22 17:13:00 +01:00
Michael B. Gale
ebd514f490 Address review comments 2025-10-22 17:11:19 +01:00
Henry Mercer
e5f165b8f5 Linting: Prefer optional chaining 2025-10-22 16:55:06 +01:00
Michael B. Gale
c98d5a9a4f Use checkoutPath and category constants consistently 2025-10-22 16:12:07 +01:00
Michael B. Gale
b7c814cb39 Gate uploadSarif behind FF, use old implementation otherwise 2025-10-22 15:54:51 +01:00
Michael B. Gale
f88cb01694 Add AnalyzeUseNewUpload feature 2025-10-22 15:49:28 +01:00
Henry Mercer
3cd3374657 Bump minor version number 2025-10-22 12:27:15 +01:00
Henry Mercer
3934593862 Remove analysisSummaryV2Default FF 2025-10-22 12:25:25 +01:00
Henry Mercer
bab3f2b5f5 Remove sarifMergeRunsFromEqualCategory FF 2025-10-22 12:22:55 +01:00
Henry Mercer
9924f476ba Add changelog note 2025-10-22 12:20:17 +01:00
Henry Mercer
bd5f49c7ca Bump minimum version to 2.17.6 2025-10-22 12:19:35 +01:00
Michael B. Gale
02b2c3aafc Fix style inconsistency 2025-10-22 12:04:04 +01:00
Michael B. Gale
aa048acb05 Merge branch 'main' into mbg/analyze/use-upload-sarif 2025-10-22 00:42:55 +01:00
Michael B. Gale
0c5185d061 Merge pull request #3221 from github/mbg/code-quality/skip-db-upload
Always skip database upload if `AnalysisKind.CodeScanning` is not enabled
2025-10-21 13:10:37 +01:00
Michael B. Gale
79ed9569a3 Always skip database upload if AnalysisKind.CodeScanning is not enabled 2025-10-21 12:33:56 +01:00
Henry Mercer
8e53c48f94 Merge pull request #3217 from github/henrymercer/http-error-handling
Wrap API configuration errors when setting up CodeQL
2025-10-21 12:15:21 +01:00
Henry Mercer
804fc665f9 Merge branch 'main' into henrymercer/http-error-handling 2025-10-21 10:37:41 +01:00
Henry Mercer
e6e649a8f3 Simplify API error checks 2025-10-21 10:31:53 +01:00
Henry Mercer
40e26468f3 Require message field too 2025-10-21 10:27:54 +01:00
Michael B. Gale
9b0ac1cc3b Merge pull request #3203 from github/mbg/errors/more-user-errors
Handle user errors for invalid `UserConfig`s and missing query files
2025-10-20 19:32:51 +01:00
Michael B. Gale
ffed63adb8 Merge pull request #3219 from github/dependabot/npm_and_yarn/npm-minor-5ed6ededba
Bump the npm-minor group with 5 updates
2025-10-20 19:14:12 +01:00
Michael B. Gale
bee06ec042 Merge pull request #3220 from github/dependabot/github_actions/dot-github/workflows/actions/setup-node-6
Bump actions/setup-node from 5 to 6 in /.github/workflows
2025-10-20 19:09:32 +01:00
github-actions[bot]
06f31ec789 Rebuild 2025-10-20 17:27:37 +00:00
dependabot[bot]
53588c5ad2 Bump actions/setup-node from 5 to 6 in /.github/workflows
Bumps [actions/setup-node](https://github.com/actions/setup-node) from 5 to 6.
- [Release notes](https://github.com/actions/setup-node/releases)
- [Commits](https://github.com/actions/setup-node/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/setup-node
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-20 17:26:07 +00:00
github-actions[bot]
2357c43cad Rebuild 2025-10-20 17:18:26 +00:00
dependabot[bot]
a3ff966dbf Bump the npm-minor group with 5 updates
Bumps the npm-minor group with 5 updates:

| Package | From | To |
| --- | --- | --- |
| [octokit](https://github.com/octokit/octokit.js) | `5.0.3` | `5.0.4` |
| [@eslint/js](https://github.com/eslint/eslint/tree/HEAD/packages/js) | `9.37.0` | `9.38.0` |
| [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) | `8.46.0` | `8.46.1` |
| [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) | `8.46.0` | `8.46.1` |
| [esbuild](https://github.com/evanw/esbuild) | `0.25.10` | `0.25.11` |


Updates `octokit` from 5.0.3 to 5.0.4
- [Release notes](https://github.com/octokit/octokit.js/releases)
- [Commits](https://github.com/octokit/octokit.js/compare/v5.0.3...v5.0.4)

Updates `@eslint/js` from 9.37.0 to 9.38.0
- [Release notes](https://github.com/eslint/eslint/releases)
- [Commits](https://github.com/eslint/eslint/commits/v9.38.0/packages/js)

Updates `@typescript-eslint/eslint-plugin` from 8.46.0 to 8.46.1
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/eslint-plugin/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.46.1/packages/eslint-plugin)

Updates `@typescript-eslint/parser` from 8.46.0 to 8.46.1
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/parser/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.46.1/packages/parser)

Updates `esbuild` from 0.25.10 to 0.25.11
- [Release notes](https://github.com/evanw/esbuild/releases)
- [Changelog](https://github.com/evanw/esbuild/blob/main/CHANGELOG.md)
- [Commits](https://github.com/evanw/esbuild/compare/v0.25.10...v0.25.11)

---
updated-dependencies:
- dependency-name: octokit
  dependency-version: 5.0.4
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: npm-minor
- dependency-name: "@eslint/js"
  dependency-version: 9.38.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: npm-minor
- dependency-name: "@typescript-eslint/eslint-plugin"
  dependency-version: 8.46.1
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: npm-minor
- dependency-name: "@typescript-eslint/parser"
  dependency-version: 8.46.1
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: npm-minor
- dependency-name: esbuild
  dependency-version: 0.25.11
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: npm-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-20 17:17:00 +00:00
Henry Mercer
6562050a4e Merge pull request #3218 from github/henrymercer/pr-sizes
Add experimental functionality for labelling PRs by their size
2025-10-20 17:45:46 +01:00
Henry Mercer
e9daf5bcd9 Comment version that is pinned
Co-authored-by: Michael B. Gale <mbg@github.com>
2025-10-20 17:25:01 +01:00
Henry Mercer
c13672ee32 Bump sizes a bit 2025-10-20 16:48:51 +01:00
Henry Mercer
f2f52d0d47 Add score for XL 2025-10-20 15:13:53 +01:00
Henry Mercer
08e53bec85 Update .github/sizeup.yml
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-20 15:12:50 +01:00
Henry Mercer
519594fe94 Update workflow name 2025-10-20 15:12:25 +01:00
Henry Mercer
8c324fe288 Add experimental functionality for labelling PRs by their size 2025-10-20 15:10:40 +01:00
Henry Mercer
a6b9514fab Wrap API configuration errors when setting up CodeQL 2025-10-20 15:01:44 +01:00
Henry Mercer
c64c4070cc Handle HTTP errors with httpStatusCode property 2025-10-20 14:38:02 +01:00
Henry Mercer
d88a5540c3 Merge pull request #3215 from github/mergeback/v4.30.9-to-main-16140ae1
Mergeback v4.30.9 refs/heads/releases/v4 into main
2025-10-17 17:06:54 +01:00
github-actions[bot]
aa0f6ea898 Rebuild 2025-10-17 15:40:22 +00:00
github-actions[bot]
b03dcd5d9d Update changelog and version after v4.30.9 2025-10-17 15:23:37 +00:00
Michael B. Gale
9ce56a247f Make schema for QueryFilter less strict 2025-10-17 15:11:16 +01:00
Michael B. Gale
2c8f4891d1 Add FF for config validation 2025-10-17 15:11:13 +01:00
Michael B. Gale
d7a8ae5fdd Include first 10 errors in exception message 2025-10-17 15:09:05 +01:00
Michael B. Gale
0822fb12e7 Log validation errors 2025-10-17 15:09:04 +01:00
Michael B. Gale
913cd47984 Add checkExpectedLogMessages function to testing-utils 2025-10-17 15:09:04 +01:00
Michael B. Gale
4f14649ced Add additional regex to CliConfigErrorCategory.PackCannotBeFound 2025-10-17 15:09:03 +01:00
Michael B. Gale
ac922ab562 Add and validate UserConfig schema 2025-10-17 15:09:01 +01:00
Michael B. Gale
66df0bc515 Add and use parseUserConfig
- Throws a `ConfigurationError` if parsing the YAML fails
- Add a couple of tests for it
2025-10-17 15:08:59 +01:00
Michael B. Gale
2ade8a09a3 Use uploadSarif rather than uploadFiles in analyze action 2025-10-14 19:49:42 +01:00
72 changed files with 17219 additions and 7003 deletions

55
.github/sizeup.yml vendored Normal file

@@ -0,0 +1,55 @@
+labeling:
+  applyCategoryLabels: true
+  categoryLabelPrefix: "size/"
+commenting:
+  addCommentWhenScoreThresholdHasBeenExceeded: false
+sizeup:
+  categories:
+    - name: extra small
+      lte: 25
+      label:
+        name: XS
+        description: Should be very easy to review
+        color: 3cbf00
+    - name: small
+      lte: 100
+      label:
+        name: S
+        description: Should be easy to review
+        color: 5d9801
+    - name: medium
+      lte: 250
+      label:
+        name: M
+        description: Should be of average difficulty to review
+        color: 7f7203
+    - name: large
+      lte: 500
+      label:
+        name: L
+        description: May be hard to review
+        color: a14c05
+    - name: extra large
+      lte: 1000
+      label:
+        name: XL
+        description: May be very hard to review
+        color: c32607
+    - name: extra extra large
+      label:
+        name: XXL
+        description: May be extremely hard to review
+        color: e50009
+  ignoredFilePatterns:
+    - ".github/workflows/__*"
+    - "lib/**/*"
+    - "package-lock.json"
+  testFilePatterns:
+    - "**/*.test.ts"
+  scoring:
+    # This formula and the aliases below it are written in prefix notation.
+    # For an explanation of how this works, please see:
+    # https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
+    formula: "- - + additions deletions comments whitespace"
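(For context: sizeup formulas are written operator-first, so the expression above is, in infix terms, (additions + deletions) - comments - whitespace. Assuming the `comments` and `whitespace` aliases count comment-only and whitespace-only changed lines, a pull request with 120 additions, 30 deletions, 10 comment lines, and 5 whitespace-only lines would score (120 + 30) - 10 - 5 = 135, landing in the medium category, lte: 250.)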


@@ -49,7 +49,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 20.x
           cache: npm


@@ -9,9 +9,6 @@ env:
   GO111MODULE: auto
 on:
   push:
-    branches:
-      - main
-      - releases/v*
   pull_request:
     types:
       - opened
@@ -56,42 +53,8 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - os: macos-latest
-            version: stable-v2.17.6
-          - os: ubuntu-latest
-            version: stable-v2.17.6
-          - os: macos-latest
-            version: stable-v2.18.4
-          - os: ubuntu-latest
-            version: stable-v2.18.4
-          - os: macos-latest
-            version: stable-v2.19.4
-          - os: ubuntu-latest
-            version: stable-v2.19.4
-          - os: macos-latest
-            version: stable-v2.20.7
-          - os: ubuntu-latest
-            version: stable-v2.20.7
-          - os: macos-latest
-            version: stable-v2.21.4
-          - os: ubuntu-latest
-            version: stable-v2.21.4
-          - os: macos-latest
-            version: stable-v2.22.4
-          - os: ubuntu-latest
-            version: stable-v2.22.4
-          - os: macos-latest
-            version: default
-          - os: ubuntu-latest
-            version: default
-          - os: macos-latest
-            version: linked
-          - os: ubuntu-latest
-            version: linked
-          - os: macos-latest
-            version: nightly-latest
           - os: ubuntu-latest
             version: nightly-latest
     name: Multi-language repository
     if: github.triggering_actor != 'dependabot[bot]'
     permissions:
@@ -185,6 +148,3 @@ jobs:
           echo "Did not create a database for Swift, or created it in the wrong location."
           exit 1
         fi
-    env:
-      CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
-      CODEQL_ACTION_TEST_MODE: true


@@ -73,7 +73,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 20.x
           cache: npm


@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 20.x
           cache: npm


@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 20.x
           cache: npm


@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 20.x
           cache: npm


@@ -80,6 +80,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
           upload-database: false
+          post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/upload-artifact@v4
@@ -96,6 +97,14 @@ jobs:
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
+      - name: Upload post-processed SARIF
+        uses: actions/upload-artifact@v4
+        with:
+          name: |
+            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+          path: ${{ runner.temp }}/post-processed
+          retention-days: 7
+          if-no-files-found: error
       - name: Check quality query does not appear in security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/github-script@v8


@@ -56,7 +56,7 @@ jobs:
         uses: actions/checkout@v5
       - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 24
           cache: 'npm'

26
.github/workflows/label-pr-size.yml vendored Normal file

@@ -0,0 +1,26 @@
+name: Label PR with size
+on:
+  pull_request:
+    types:
+      - opened
+      - synchronize
+      - reopened
+      - edited
+      - ready_for_review
+permissions:
+  contents: read
+  pull-requests: write
+jobs:
+  sizeup:
+    name: Label PR with size
+    runs-on: ubuntu-latest
+    steps:
+      - name: Run sizeup
+        uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
+        with:
+          token: "${{ secrets.GITHUB_TOKEN }}"
+          configuration-file-path: ".github/sizeup.yml"


@@ -47,7 +47,7 @@ jobs:
       - uses: actions/checkout@v5
         with:
           fetch-depth: 0 # ensure we have all tags and can push commits
-      - uses: actions/setup-node@v5
+      - uses: actions/setup-node@v6
       - name: Update git config
         run: |


@@ -35,7 +35,7 @@ jobs:
       - uses: actions/checkout@v5
       - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: ${{ matrix.node-version }}
           cache: 'npm'


@@ -32,7 +32,7 @@ jobs:
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 24
           cache: npm


@@ -41,7 +41,7 @@ jobs:
           git config --global user.name "github-actions[bot]"
       - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
           node-version: 24
           cache: 'npm'


@@ -2,6 +2,15 @@
 See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
 
 ## [UNRELEASED]
 
 No user facing changes.
 
+## 4.31.0 - 24 Oct 2025
+
+- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
+- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
+
 ## 4.30.9 - 17 Oct 2025
 
 - Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)


@@ -6,7 +6,7 @@ inputs:
     description: The name of the check run to add text to.
     required: false
   output:
-    description: The path of the directory in which to save the SARIF results
+    description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
     required: false
     default: "../results"
   upload:
@@ -70,6 +70,12 @@ inputs:
     description: Whether to upload the resulting CodeQL database
     required: false
     default: "true"
+  post-processed-sarif-path:
+    description: >-
+      Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
+      on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
+      argument for this input, they are written to the specified directory.
+    required: false
   wait-for-processing:
     description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
     required: true
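A minimal usage sketch for this input (illustrative only: the step layout and output path are hypothetical, while `upload: never` is one of the existing values of the `upload` input):

    - uses: github/codeql-action/analyze@v4
      with:
        upload: never
        post-processed-sarif-path: ${{ runner.temp }}/post-processed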


@@ -131,6 +131,7 @@ export default [
       "no-sequences": "error",
       "no-shadow": "off",
       "@typescript-eslint/no-shadow": "error",
+      "@typescript-eslint/prefer-optional-chain": "error",
       "one-var": ["error", "never"],
     },
   },

File diff suppressed because it is too large

3711
lib/analyze-action.js generated

File diff suppressed because it is too large

1635
lib/autobuild-action.js generated

File diff suppressed because it is too large

3337
lib/init-action-post.js generated

File diff suppressed because it is too large

2100
lib/init-action.js generated

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

1619
lib/start-proxy-action.js generated

File diff suppressed because it is too large

348
lib/upload-lib.js generated

@@ -20885,19 +20885,19 @@ var require_validator = __commonJS({
 var SchemaError = helpers.SchemaError;
 var SchemaContext = helpers.SchemaContext;
 var anonymousBase = "/";
-var Validator2 = function Validator3() {
-this.customFormats = Object.create(Validator3.prototype.customFormats);
+var Validator3 = function Validator4() {
+this.customFormats = Object.create(Validator4.prototype.customFormats);
 this.schemas = {};
 this.unresolvedRefs = [];
 this.types = Object.create(types);
 this.attributes = Object.create(attribute.validators);
 };
-Validator2.prototype.customFormats = {};
-Validator2.prototype.schemas = null;
-Validator2.prototype.types = null;
-Validator2.prototype.attributes = null;
-Validator2.prototype.unresolvedRefs = null;
-Validator2.prototype.addSchema = function addSchema(schema2, base) {
+Validator3.prototype.customFormats = {};
+Validator3.prototype.schemas = null;
+Validator3.prototype.types = null;
+Validator3.prototype.attributes = null;
+Validator3.prototype.unresolvedRefs = null;
+Validator3.prototype.addSchema = function addSchema(schema2, base) {
 var self2 = this;
 if (!schema2) {
 return null;
@@ -20915,25 +20915,25 @@ var require_validator = __commonJS({
 });
 return this.schemas[ourUri];
 };
-Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
+Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
 if (!Array.isArray(schemas)) return;
 for (var i = 0; i < schemas.length; i++) {
 this.addSubSchema(baseuri, schemas[i]);
 }
 };
-Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
+Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
 if (!schemas || typeof schemas != "object") return;
 for (var p in schemas) {
 this.addSubSchema(baseuri, schemas[p]);
 }
 };
-Validator2.prototype.setSchemas = function setSchemas(schemas) {
+Validator3.prototype.setSchemas = function setSchemas(schemas) {
 this.schemas = schemas;
 };
-Validator2.prototype.getSchema = function getSchema(urn) {
+Validator3.prototype.getSchema = function getSchema(urn) {
 return this.schemas[urn];
 };
-Validator2.prototype.validate = function validate(instance, schema2, options, ctx) {
+Validator3.prototype.validate = function validate(instance, schema2, options, ctx) {
 if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) {
 throw new SchemaError("Expected `schema` to be an object or boolean");
 }
@@ -20971,7 +20971,7 @@ var require_validator = __commonJS({
 if (typeof ref == "string") return ref;
 return false;
 }
-Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
+Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
 var result = new ValidatorResult(instance, schema2, options, ctx);
 if (typeof schema2 === "boolean") {
 if (schema2 === true) {
@@ -21021,17 +21021,17 @@ var require_validator = __commonJS({
 }
 return result;
 };
-Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
+Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
 schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
 };
-Validator2.prototype.superResolve = function superResolve(schema2, ctx) {
+Validator3.prototype.superResolve = function superResolve(schema2, ctx) {
 var ref = shouldResolve(schema2);
 if (ref) {
 return this.resolve(schema2, ref, ctx).subschema;
 }
 return schema2;
 };
-Validator2.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
+Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
 switchSchema = ctx.resolve(switchSchema);
 if (ctx.schemas[switchSchema]) {
 return { subschema: ctx.schemas[switchSchema], switchSchema };
@@ -21048,7 +21048,7 @@ var require_validator = __commonJS({
 }
 return { subschema, switchSchema };
 };
-Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
+Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
 if (type2 === void 0) {
 return;
 } else if (type2 === null) {
@@ -21063,7 +21063,7 @@ var require_validator = __commonJS({
 }
 return true;
 };
-var types = Validator2.prototype.types = {};
+var types = Validator3.prototype.types = {};
 types.string = function testString(instance) {
 return typeof instance == "string";
 };
@@ -21091,7 +21091,7 @@ var require_validator = __commonJS({
 types.object = function testObject(instance) {
 return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date);
 };
-module2.exports = Validator2;
+module2.exports = Validator3;
 }
 });
@@ -21099,7 +21099,7 @@ var require_validator = __commonJS({
 var require_lib2 = __commonJS({
 "node_modules/jsonschema/lib/index.js"(exports2, module2) {
 "use strict";
-var Validator2 = module2.exports.Validator = require_validator();
+var Validator3 = module2.exports.Validator = require_validator();
 module2.exports.ValidatorResult = require_helpers().ValidatorResult;
 module2.exports.ValidatorResultError = require_helpers().ValidatorResultError;
 module2.exports.ValidationError = require_helpers().ValidationError;
@@ -21107,7 +21107,7 @@ var require_lib2 = __commonJS({
 module2.exports.SchemaScanResult = require_scan().SchemaScanResult;
 module2.exports.scan = require_scan().scan;
 module2.exports.validate = function(instance, schema2, options) {
-var v = new Validator2();
+var v = new Validator3();
 return v.validate(instance, schema2, options);
 };
 }
@@ -21899,14 +21899,14 @@ var require_dist_node4 = __commonJS({
 var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
 var dist_src_exports = {};
 __export2(dist_src_exports, {
-RequestError: () => RequestError2
+RequestError: () => RequestError
 });
 module2.exports = __toCommonJS2(dist_src_exports);
 var import_deprecation = require_dist_node3();
 var import_once = __toESM2(require_once());
 var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
 var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
-var RequestError2 = class extends Error {
+var RequestError = class extends Error {
 constructor(message, statusCode, options) {
 super(message);
 if (Error.captureStackTrace) {
@@ -21998,7 +21998,7 @@ var require_dist_node5 = __commonJS({
 const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
 return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
 }
-var import_request_error2 = require_dist_node4();
+var import_request_error = require_dist_node4();
 function getBufferResponse(response) {
 return response.arrayBuffer();
 }
@@ -22050,7 +22050,7 @@ var require_dist_node5 = __commonJS({
 if (status < 400) {
 return;
 }
-throw new import_request_error2.RequestError(response.statusText, status, {
+throw new import_request_error.RequestError(response.statusText, status, {
 response: {
 url: url2,
 status,
@@ -22061,7 +22061,7 @@ var require_dist_node5 = __commonJS({
 });
 }
 if (status === 304) {
-throw new import_request_error2.RequestError("Not modified", status, {
+throw new import_request_error.RequestError("Not modified", status, {
 response: {
 url: url2,
 status,
@@ -22073,7 +22073,7 @@ var require_dist_node5 = __commonJS({
 }
 if (status >= 400) {
 const data = await getResponseData(response);
-const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
+const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
 response: {
 url: url2,
 status,
@@ -22093,7 +22093,7 @@ var require_dist_node5 = __commonJS({
 data
 };
 }).catch((error2) => {
-if (error2 instanceof import_request_error2.RequestError)
+if (error2 instanceof import_request_error.RequestError)
 throw error2;
 else if (error2.name === "AbortError")
 throw error2;
@@ -22105,7 +22105,7 @@ var require_dist_node5 = __commonJS({
 message = error2.cause;
 }
 }
-throw new import_request_error2.RequestError(message, 500, {
+throw new import_request_error.RequestError(message, 500, {
 request: requestOptions
 });
 });
@@ -22547,14 +22547,14 @@ var require_dist_node7 = __commonJS({
 var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
 var dist_src_exports = {};
 __export2(dist_src_exports, {
-RequestError: () => RequestError2
+RequestError: () => RequestError
 });
 module2.exports = __toCommonJS2(dist_src_exports);
 var import_deprecation = require_dist_node3();
 var import_once = __toESM2(require_once());
 var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
 var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
-var RequestError2 = class extends Error {
+var RequestError = class extends Error {
 constructor(message, statusCode, options) {
 super(message);
 if (Error.captureStackTrace) {
@@ -22646,7 +22646,7 @@ var require_dist_node8 = __commonJS({
 const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
 return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
 }
-var import_request_error2 = require_dist_node7();
+var import_request_error = require_dist_node7();
 function getBufferResponse(response) {
 return response.arrayBuffer();
 }
@@ -22698,7 +22698,7 @@ var require_dist_node8 = __commonJS({
 if (status < 400) {
 return;
 }
-throw new import_request_error2.RequestError(response.statusText, status, {
+throw new import_request_error.RequestError(response.statusText, status, {
 response: {
 url: url2,
 status,
@@ -22709,7 +22709,7 @@ var require_dist_node8 = __commonJS({
 });
 }
 if (status === 304) {
-throw new import_request_error2.RequestError("Not modified", status, {
+throw new import_request_error.RequestError("Not modified", status, {
 response: {
 url: url2,
 status,
@@ -22721,7 +22721,7 @@ var require_dist_node8 = __commonJS({
 }
 if (status >= 400) {
 const data = await getResponseData(response);
-const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
+const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
 response: {
 url: url2,
 status,
@@ -22741,7 +22741,7 @@ var require_dist_node8 = __commonJS({
 data
 };
 }).catch((error2) => {
-if (error2 instanceof import_request_error2.RequestError)
+if (error2 instanceof import_request_error.RequestError)
 throw error2;
 else if (error2.name === "AbortError")
 throw error2;
@@ -22753,7 +22753,7 @@ var require_dist_node8 = __commonJS({
 message = error2.cause;
 }
 }
-throw new import_request_error2.RequestError(message, 500, {
+throw new import_request_error.RequestError(message, 500, {
 request: requestOptions
 });
 });
@@ -33606,7 +33606,7 @@ var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "codeql",
-version: "4.30.9",
+version: "4.31.1",
 private: true,
 description: "CodeQL action",
 scripts: {
@@ -33644,7 +33644,6 @@ var require_package = __commonJS({
 "@octokit/request-error": "^7.0.1",
 "@schemastore/package": "0.0.10",
 archiver: "^7.0.1",
-"check-disk-space": "^3.4.0",
 "console-log-level": "^1.4.1",
 del: "^8.0.0",
 "fast-deep-equal": "^3.1.3",
@@ -33654,7 +33653,7 @@ var require_package = __commonJS({
 jsonschema: "1.4.1",
 long: "^5.3.2",
 "node-forge": "^1.3.1",
-octokit: "^5.0.3",
+octokit: "^5.0.4",
 semver: "^7.7.3",
 uuid: "^13.0.0"
 },
@@ -33662,7 +33661,7 @@ var require_package = __commonJS({
 "@ava/typescript": "6.0.0",
 "@eslint/compat": "^1.4.0",
 "@eslint/eslintrc": "^3.3.1",
-"@eslint/js": "^9.37.0",
+"@eslint/js": "^9.38.0",
 "@microsoft/eslint-formatter-sarif": "^3.1.0",
 "@octokit/types": "^15.0.0",
 "@types/archiver": "^6.0.3",
@@ -33673,10 +33672,10 @@ var require_package = __commonJS({
 "@types/node-forge": "^1.3.14",
 "@types/semver": "^7.7.1",
 "@types/sinon": "^17.0.4",
-"@typescript-eslint/eslint-plugin": "^8.46.0",
+"@typescript-eslint/eslint-plugin": "^8.46.1",
 "@typescript-eslint/parser": "^8.41.0",
 ava: "^6.4.1",
-esbuild: "^0.25.10",
+esbuild: "^0.25.11",
 eslint: "^8.57.1",
 "eslint-import-resolver-typescript": "^3.8.7",
 "eslint-plugin-filenames": "^1.3.2",
@@ -35065,14 +35064,14 @@ var require_dist_node14 = __commonJS({
 var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
 var dist_src_exports = {};
 __export2(dist_src_exports, {
-RequestError: () => RequestError2
+RequestError: () => RequestError
 });
 module2.exports = __toCommonJS2(dist_src_exports);
 var import_deprecation = require_dist_node3();
 var import_once = __toESM2(require_once());
 var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
 var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
-var RequestError2 = class extends Error {
+var RequestError = class extends Error {
 constructor(message, statusCode, options) {
 super(message);
 if (Error.captureStackTrace) {
@@ -35174,7 +35173,7 @@ var require_dist_node15 = __commonJS({
 throw error2;
 }
 var import_light = __toESM2(require_light());
-var import_request_error2 = require_dist_node14();
+var import_request_error = require_dist_node14();
 async function wrapRequest(state, octokit, request, options) {
 const limiter = new import_light.default();
 limiter.on("failed", function(error2, info4) {
@@ -35195,7 +35194,7 @@ var require_dist_node15 = __commonJS({
 if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
 response.data.errors[0].message
 )) {
-const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
+const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
 request: options,
 response
 });
@@ -80921,14 +80920,14 @@ var require_tool_cache = __commonJS({
 var assert_1 = require("assert");
 var exec_1 = require_exec();
 var retry_helper_1 = require_retry_helper();
-var HTTPError = class extends Error {
+var HTTPError2 = class extends Error {
 constructor(httpStatusCode) {
 super(`Unexpected HTTP response: ${httpStatusCode}`);
 this.httpStatusCode = httpStatusCode;
 Object.setPrototypeOf(this, new.target.prototype);
 }
 };
-exports2.HTTPError = HTTPError;
+exports2.HTTPError = HTTPError2;
 var IS_WINDOWS = process.platform === "win32";
 var IS_MAC = process.platform === "darwin";
 var userAgent = "actions/tool-cache";
@@ -80945,7 +80944,7 @@ var require_tool_cache = __commonJS({
 return yield retryHelper.execute(() => __awaiter4(this, void 0, void 0, function* () {
 return yield downloadToolAttempt(url2, dest || "", auth, headers);
 }), (err) => {
-if (err instanceof HTTPError && err.httpStatusCode) {
+if (err instanceof HTTPError2 && err.httpStatusCode) {
 if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) {
 return false;
 }
@@ -80972,7 +80971,7 @@ var require_tool_cache = __commonJS({
 }
 const response = yield http.get(url2, headers);
 if (response.message.statusCode !== 200) {
-const err = new HTTPError(response.message.statusCode);
+const err = new HTTPError2(response.message.statusCode);
 core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
 throw err;
 }
@@ -84847,6 +84846,7 @@ __export(upload_lib_exports, {
 getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
 getSarifFilePaths: () => getSarifFilePaths,
 populateRunAutomationDetails: () => populateRunAutomationDetails,
+postProcessSarifFiles: () => postProcessSarifFiles,
 readSarifFile: () => readSarifFile,
 shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
 shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
@@ -84854,10 +84854,11 @@ __export(upload_lib_exports, {
 throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled,
 uploadFiles: () => uploadFiles,
 uploadPayload: () => uploadPayload,
-uploadSpecifiedFiles: () => uploadSpecifiedFiles,
+uploadPostProcessedFiles: () => uploadPostProcessedFiles,
 validateSarifFileSchema: () => validateSarifFileSchema,
 validateUniqueCategory: () => validateUniqueCategory,
-waitForProcessing: () => waitForProcessing
+waitForProcessing: () => waitForProcessing,
+writePostProcessedFiles: () => writePostProcessedFiles
 });
 module.exports = __toCommonJS(upload_lib_exports);
 var fs13 = __toESM(require("fs"));
@@ -84865,7 +84866,7 @@ var path14 = __toESM(require("path"));
 var url = __toESM(require("url"));
 var import_zlib = __toESM(require("zlib"));
 var core11 = __toESM(require_core());
-var jsonschema = __toESM(require_lib2());
+var jsonschema2 = __toESM(require_lib2());
 // src/actions-util.ts
 var fs4 = __toESM(require("fs"));
@@ -88328,13 +88329,35 @@ function getRequiredEnvParam(paramName) {
 }
 return value;
 }
+function getOptionalEnvVar(paramName) {
+const value = process.env[paramName];
+if (value?.trim().length === 0) {
+return void 0;
+}
+return value;
+}
 var HTTPError = class extends Error {
 constructor(message, status) {
 super(message);
 this.status = status;
 }
 };
 var ConfigurationError = class extends Error {
 constructor(message) {
 super(message);
 }
 };
-function isHTTPError(arg) {
-return arg?.status !== void 0 && Number.isInteger(arg.status);
+function asHTTPError(arg) {
+if (typeof arg !== "object" || arg === null || typeof arg.message !== "string") {
+return void 0;
+}
+if (Number.isInteger(arg.status)) {
+return new HTTPError(arg.message, arg.status);
+}
+if (Number.isInteger(arg.httpStatusCode)) {
+return new HTTPError(arg.message, arg.httpStatusCode);
+}
+return void 0;
 }
 var cachedCodeQlVersion = void 0;
 function cacheCodeQlVersion(version) {
@@ -88747,14 +88770,24 @@ function computeAutomationID(analysis_key, environment) {
 return automationID;
 }
 function wrapApiConfigurationError(e) {
-if (isHTTPError(e)) {
-if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
-return new ConfigurationError(e.message);
-} else if (e.message.includes("Bad credentials") || e.message.includes("Not Found")) {
+const httpError = asHTTPError(e);
+if (httpError !== void 0) {
+if ([
+/API rate limit exceeded/,
+/commit not found/,
+/Resource not accessible by integration/,
+/ref .* not found in this repository/
+].some((pattern) => pattern.test(httpError.message))) {
+return new ConfigurationError(httpError.message);
+}
+if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found")) {
 return new ConfigurationError(
 "Please check that your token is valid and has the required permissions: contents: read, security-events: write"
 );
 }
+if (httpError.status === 429) {
+return new ConfigurationError("API rate limit exceeded");
+}
 }
 return e;
 }
@@ -88765,45 +88798,6 @@ var path12 = __toESM(require("path"));
 var core10 = __toESM(require_core());
 var toolrunner3 = __toESM(require_toolrunner());
-// node_modules/@octokit/request-error/dist-src/index.js
-var RequestError = class extends Error {
-name;
-/**
- * http status code
- */
-status;
-/**
- * Request options that lead to the error.
- */
-request;
-/**
- * Response object if a response was received
- */
-response;
-constructor(message, statusCode, options) {
-super(message);
-this.name = "HttpError";
-this.status = Number.parseInt(statusCode);
-if (Number.isNaN(this.status)) {
-this.status = 0;
-}
-if ("response" in options) {
-this.response = options.response;
-}
-const requestCopy = Object.assign({}, options.request);
-if (options.request.headers.authorization) {
-requestCopy.headers = Object.assign({}, options.request.headers, {
-authorization: options.request.headers.authorization.replace(
-/(?<! ) .*$/,
-" [REDACTED]"
-)
-});
-}
-requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
-this.request = requestCopy;
-}
-};
 // src/cli-errors.ts
 var SUPPORTED_PLATFORMS = [
 ["linux", "x64"],
@@ -88971,6 +88965,9 @@ var cliErrorsConfig = {
 cliErrorMessageCandidates: [
 new RegExp(
 "Query pack .* cannot be found\\. Check the spelling of the pack\\."
-)
+),
+new RegExp(
+"is not a .ql file, .qls file, a directory, or a query pack specification."
+)
 ]
 },
@@ -89049,6 +89046,7 @@
 var path9 = __toESM(require("path"));
 var core6 = __toESM(require_core());
 // src/config/db-config.ts
+var jsonschema = __toESM(require_lib2());
 var semver2 = __toESM(require_semver2());
 var PACK_IDENTIFIER_PATTERN = (function() {
 const alphaNumeric = "[a-z0-9]";
@@ -89286,7 +89284,7 @@ function formatDuration(durationMs) {
 // src/overlay-database-utils.ts
 var CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
-var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
+var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
 var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
 async function writeBaseDatabaseOidsFile(config, sourceRoot) {
 const gitFileOids = await getFileOidsUnderPath(sourceRoot);
@@ -89359,6 +89357,11 @@ var featureConfig = {
 envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
 minimumVersion: void 0
 },
+["analyze_use_new_upload" /* AnalyzeUseNewUpload */]: {
+defaultValue: false,
+envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
+minimumVersion: void 0
+},
 ["cleanup_trap_caches" /* CleanupTrapCaches */]: {
 defaultValue: false,
 envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -89530,6 +89533,11 @@ var featureConfig = {
 defaultValue: false,
 envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
 minimumVersion: "2.23.0"
+},
+["validate_db_config" /* ValidateDbConfig */]: {
+defaultValue: false,
+envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
+minimumVersion: void 0
 }
 };
@@ -89708,13 +89716,13 @@ async function getTarVersion() {
 }
 if (stdout.includes("GNU tar")) {
 const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
-if (!match || !match[1]) {
+if (!match?.[1]) {
 throw new Error("Failed to parse output of tar --version.");
 }
 return { type: "gnu", version: match[1] };
 } else if (stdout.includes("bsdtar")) {
 const match = stdout.match(/bsdtar ([0-9.]+)/);
-if (!match || !match[1]) {
+if (!match?.[1]) {
 throw new Error("Failed to parse output of tar --version.");
 }
 return { type: "bsd", version: match[1] };
@@ -90094,7 +90102,7 @@ function tryGetTagNameFromUrl(url2, logger) {
 return void 0;
 }
 const match = matches[matches.length - 1];
-if (match === null || match.length !== 2) {
+if (match?.length !== 2) {
 logger.debug(
 `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify(
 match
@@ -90554,7 +90562,7 @@ async function shouldEnableIndirectTracing(codeql, config) {
 // src/codeql.ts
 var cachedCodeQL = void 0;
-var CODEQL_MINIMUM_VERSION = "2.16.6";
+var CODEQL_MINIMUM_VERSION = "2.17.6";
 var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6";
 var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
 var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
@@ -90598,9 +90606,9 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
 toolsVersion,
 zstdAvailability
 };
-} catch (e) {
-const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
-e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
+} catch (rawError) {
+const e = wrapApiConfigurationError(rawError);
+const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
 throw new ErrorClass(
 `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
@@ -90889,12 +90897,6 @@ ${output}`
 } else {
 codeqlArgs.push("--no-sarif-include-diagnostics");
 }
-if (!isSupportedToolsFeature(
-await this.getVersion(),
-"analysisSummaryV2Default" /* AnalysisSummaryV2IsDefault */
-)) {
-codeqlArgs.push("--new-analysis-summary");
-}
 codeqlArgs.push(databasePath);
 if (querySuitePaths) {
 codeqlArgs.push(...querySuitePaths);
@@ -92444,24 +92446,6 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
 );
 codeQL = initCodeQLResult.codeql;
 }
-if (!await codeQL.supportsFeature(
-"sarifMergeRunsFromEqualCategory" /* SarifMergeRunsFromEqualCategory */
-)) {
-await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
-logger.warning(
-"The CodeQL CLI does not support merging SARIF files. Merging files in the action."
-);
-if (await shouldShowCombineSarifFilesDeprecationWarning(
-sarifObjects,
-gitHubVersion
-)) {
-logger.warning(
-`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`
-);
-core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
-}
-return combineSarifFiles(sarifFiles, logger);
-}
 const baseTempDir = path14.resolve(tempDir, "combined-sarif");
 fs13.mkdirSync(baseTempDir, { recursive: true });
 const outputDirectory = fs13.mkdtempSync(path14.resolve(baseTempDir, "output-"));
@@ -92520,16 +92504,17 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
 logger.info("Successfully uploaded results");
 return response.data.id;
 } catch (e) {
-if (isHTTPError(e)) {
-switch (e.status) {
+const httpError = asHTTPError(e);
+if (httpError !== void 0) {
+switch (httpError.status) {
 case 403:
-core11.warning(e.message || GENERIC_403_MSG);
+core11.warning(httpError.message || GENERIC_403_MSG);
 break;
 case 404:
-core11.warning(e.message || GENERIC_404_MSG);
+core11.warning(httpError.message || GENERIC_404_MSG);
 break;
 default:
-core11.warning(e.message);
+core11.warning(httpError.message);
 break;
 }
 }
@@ -92651,7 +92636,7 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) {
 }
 logger.info(`Validating ${sarifFilePath}`);
 const schema2 = require_sarif_schema_2_1_0();
-const result = new jsonschema.Validator().validate(sarif, schema2);
+const result = new jsonschema2.Validator().validate(sarif, schema2);
 const warningAttributes = ["uri-reference", "uri"];
 const errors = (result.errors ?? []).filter(
 (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -92711,26 +92696,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
 }
 return payloadObj;
 }
-async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
-const sarifPaths = getSarifFilePaths(
-inputSarifPath,
-uploadTarget.sarifPredicate
-);
-return uploadSpecifiedFiles(
-sarifPaths,
-checkoutPath,
-category,
-features,
-logger,
-uploadTarget
-);
-}
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-logger.startGroup(`Uploading ${uploadTarget.name} results`);
-logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
+async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
+logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
 const gitHubVersion = await getGitHubVersion();
 let sarif;
-category = uploadTarget.fixCategory(logger, category);
+category = analysis.fixCategory(logger, category);
 if (sarifPaths.length > 1) {
 for (const sarifPath of sarifPaths) {
 const parsedSarif = readSarifFile(sarifPath);
@@ -92758,28 +92728,72 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
 analysisKey,
 environment
 );
+return { sarif, analysisKey, environment };
+}
+async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
+const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
+if (outputPath !== void 0) {
+dumpSarifFile(
+JSON.stringify(postProcessingResults.sarif),
+outputPath,
+logger,
+uploadTarget
+);
+} else {
+logger.debug(`Not writing post-processed SARIF files.`);
+}
+}
+async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+const sarifPaths = getSarifFilePaths(
+inputSarifPath,
+uploadTarget.sarifPredicate
+);
+return uploadSpecifiedFiles(
+sarifPaths,
+checkoutPath,
+category,
+features,
+logger,
+uploadTarget
+);
+}
+async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
+const processingResults = await postProcessSarifFiles(
+logger,
+features,
+checkoutPath,
+sarifPaths,
+category,
+uploadTarget
+);
+return uploadPostProcessedFiles(
+logger,
+checkoutPath,
+uploadTarget,
+processingResults
+);
+}
+async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
+logger.startGroup(`Uploading ${uploadTarget.name} results`);
+const sarif = postProcessingResults.sarif;
 const toolNames = getToolNames(sarif);
 logger.debug(`Validating that each SARIF run has a unique category`);
 validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
 logger.debug(`Serializing SARIF for upload`);
 const sarifPayload = JSON.stringify(sarif);
-const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
-if (dumpDir) {
-dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
-}
 logger.debug(`Compressing serialized SARIF`);
 const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
 const checkoutURI = url.pathToFileURL(checkoutPath).href;
 const payload = buildPayload(
 await getCommitOid(checkoutPath),
 await getRef(),
-analysisKey,
+postProcessingResults.analysisKey,
 getRequiredEnvParam("GITHUB_WORKFLOW"),
 zippedSarif,
 getWorkflowRunID(),
 getWorkflowRunAttempt(),
 checkoutURI,
-environment,
+postProcessingResults.environment,
 toolNames,
 await determineBaseBranchHeadCommitOid()
 );
@@ -92810,14 +92824,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
 fs13.mkdirSync(outputDir, { recursive: true });
 } else if (!fs13.lstatSync(outputDir).isDirectory()) {
 throw new ConfigurationError(
-`The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}`
+`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
 );
 }
 const outputFile = path14.resolve(
 outputDir,
 `upload${uploadTarget.sarifExtension}`
 );
-logger.info(`Dumping processed SARIF file to ${outputFile}`);
+logger.info(`Writing processed SARIF file to ${outputFile}`);
 fs13.writeFileSync(outputFile, sarifPayload);
 }
 var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
@@ -92979,6 +92993,7 @@ function filterAlertsByDiffRange(logger, sarif) {
 getGroupedSarifFilePaths,
 getSarifFilePaths,
 populateRunAutomationDetails,
+postProcessSarifFiles,
 readSarifFile,
 shouldConsiderConfigurationError,
 shouldConsiderInvalidRequest,
@@ -92986,10 +93001,11 @@ function filterAlertsByDiffRange(logger, sarif) {
 throwIfCombineSarifFilesDisabled,
 uploadFiles,
 uploadPayload,
-uploadSpecifiedFiles,
+uploadPostProcessedFiles,
 validateSarifFileSchema,
 validateUniqueCategory,
-waitForProcessing
+waitForProcessing,
+writePostProcessedFiles
 });
 /*! Bundled license information:

File diff suppressed because it is too large

File diff suppressed because it is too large

966
package-lock.json generated

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
 {
   "name": "codeql",
-  "version": "4.30.9",
+  "version": "4.31.1",
   "private": true,
   "description": "CodeQL action",
   "scripts": {
@@ -38,7 +38,6 @@
   "@octokit/request-error": "^7.0.1",
   "@schemastore/package": "0.0.10",
   "archiver": "^7.0.1",
-  "check-disk-space": "^3.4.0",
   "console-log-level": "^1.4.1",
   "del": "^8.0.0",
   "fast-deep-equal": "^3.1.3",
@@ -48,7 +47,7 @@
   "jsonschema": "1.4.1",
   "long": "^5.3.2",
   "node-forge": "^1.3.1",
-  "octokit": "^5.0.3",
+  "octokit": "^5.0.4",
   "semver": "^7.7.3",
   "uuid": "^13.0.0"
 },
@@ -56,7 +55,7 @@
   "@ava/typescript": "6.0.0",
   "@eslint/compat": "^1.4.0",
   "@eslint/eslintrc": "^3.3.1",
-  "@eslint/js": "^9.37.0",
+  "@eslint/js": "^9.38.0",
   "@microsoft/eslint-formatter-sarif": "^3.1.0",
   "@octokit/types": "^15.0.0",
   "@types/archiver": "^6.0.3",
@@ -67,10 +66,10 @@
   "@types/node-forge": "^1.3.14",
   "@types/semver": "^7.7.1",
   "@types/sinon": "^17.0.4",
-  "@typescript-eslint/eslint-plugin": "^8.46.0",
+  "@typescript-eslint/eslint-plugin": "^8.46.1",
   "@typescript-eslint/parser": "^8.41.0",
   "ava": "^6.4.1",
-  "esbuild": "^0.25.10",
+  "esbuild": "^0.25.11",
   "eslint": "^8.57.1",
   "eslint-import-resolver-typescript": "^3.8.7",
   "eslint-plugin-filenames": "^1.3.2",


@@ -36,6 +36,7 @@ steps:
   with:
     output: "${{ runner.temp }}/results"
     upload-database: false
+    post-processed-sarif-path: "${{ runner.temp }}/post-processed"
 - name: Upload security SARIF
   if: contains(matrix.analysis-kinds, 'code-scanning')
   uses: actions/upload-artifact@v4
@@ -52,6 +53,14 @@ steps:
       quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
     path: "${{ runner.temp }}/results/javascript.quality.sarif"
     retention-days: 7
+- name: Upload post-processed SARIF
+  uses: actions/upload-artifact@v4
+  with:
+    name: |
+      post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+    path: "${{ runner.temp }}/post-processed"
+    retention-days: 7
+    if-no-files-found: error
 - name: Check quality query does not appear in security SARIF
   if: contains(matrix.analysis-kinds, 'code-scanning')
   uses: actions/github-script@v8


@@ -117,7 +117,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
     steps.extend([
         {
             'name': 'Install Node.js',
-            'uses': 'actions/setup-node@v5',
+            'uses': 'actions/setup-node@v6',
             'with': {
                 'node-version': '20.x',
                 'cache': 'npm',


@@ -24,6 +24,9 @@ setupTests(test);
// but the first test would fail.
test("analyze action with RAM & threads from environment variables", async (t) => {
// This test frequently times out on Windows with the default timeout, so we bump
// it a bit to 20s.
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";


@@ -24,6 +24,7 @@ setupTests(test);
// but the first test would fail.
test("analyze action with RAM & threads from action inputs", async (t) => {
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";


@@ -52,6 +52,7 @@ import {
} from "./trap-caching";
import * as uploadLib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import { postProcessAndUploadSarif } from "./upload-sarif";
import * as util from "./util";
interface AnalysisStatusReport
@@ -211,7 +212,9 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {
async function run() {
const startedAt = new Date();
let uploadResult: UploadResult | undefined = undefined;
let uploadResults:
| Partial<Record<analyses.AnalysisKind, UploadResult>>
| undefined = undefined;
let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined;
let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
@@ -341,31 +344,67 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
core.setOutput("sarif-output", path.resolve(outputDir));
const uploadInput = actionsUtil.getOptionalInput("upload");
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
if (isCodeScanningEnabled(config)) {
uploadResult = await uploadLib.uploadFiles(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
features,
const uploadKind = actionsUtil.getUploadValue(
actionsUtil.getOptionalInput("upload"),
);
if (runStats) {
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
uploadResults = await postProcessAndUploadSarif(
logger,
analyses.CodeScanning,
features,
uploadKind,
checkoutPath,
outputDir,
category,
actionsUtil.getOptionalInput("post-processed-sarif-path"),
);
core.setOutput("sarif-id", uploadResult.sarifID);
} else if (uploadKind === "always") {
uploadResults = {};
if (isCodeScanningEnabled(config)) {
uploadResults[analyses.AnalysisKind.CodeScanning] =
await uploadLib.uploadFiles(
outputDir,
checkoutPath,
category,
features,
logger,
analyses.CodeScanning,
);
}
if (isCodeQualityEnabled(config)) {
uploadResults[analyses.AnalysisKind.CodeQuality] =
await uploadLib.uploadFiles(
outputDir,
checkoutPath,
category,
features,
logger,
analyses.CodeQuality,
);
}
} else {
uploadResults = {};
logger.info("Not uploading results");
}
if (isCodeQualityEnabled(config)) {
const analysis = analyses.CodeQuality;
const qualityUploadResult = await uploadLib.uploadFiles(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
features,
logger,
analysis,
// Set the SARIF id outputs only if we have results for them, to avoid
// having keys with empty values in the action output.
if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
core.setOutput(
"sarif-id",
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
);
}
if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
core.setOutput(
"quality-sarif-id",
uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
);
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
}
} else {
logger.info("Not uploading results");
@@ -408,12 +447,12 @@ async function run() {
if (util.isInTestMode()) {
logger.debug("In test mode. Waiting for processing is disabled.");
} else if (
uploadResult !== undefined &&
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await uploadLib.waitForProcessing(
getRepositoryNwo(),
uploadResult.sarifID,
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
getActionsLogger(),
);
}
@@ -450,13 +489,16 @@ async function run() {
return;
}
if (runStats && uploadResult) {
if (
runStats !== undefined &&
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
) {
await sendStatusReport(
startedAt,
config,
{
...runStats,
...uploadResult.statusReport,
...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
},
undefined,
trapCacheUploadTime,
@@ -466,7 +508,7 @@ async function run() {
dependencyCacheResults,
logger,
);
} else if (runStats) {
} else if (runStats !== undefined) {
await sendStatusReport(
startedAt,
config,

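The refactor above replaces the single `uploadResult` with a record keyed by analysis kind, so each output is set only when that kind actually produced a result. A self-contained sketch of the pattern, with stand-in types (the real `UploadResult` and `AnalysisKind` are richer, and the enum values here are assumptions):

enum AnalysisKind { CodeScanning = "code-scanning", CodeQuality = "code-quality" }
interface UploadResult { sarifID: string }

const uploadResults: Partial<Record<AnalysisKind, UploadResult>> = {};
uploadResults[AnalysisKind.CodeScanning] = { sarifID: "example-id" };

// Only set outputs for analysis kinds that actually uploaded something,
// so the action output never contains keys with empty values.
const scanning = uploadResults[AnalysisKind.CodeScanning];
if (scanning !== undefined) {
  console.log(`sarif-id=${scanning.sarifID}`);
}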

@@ -4,10 +4,8 @@ import * as path from "path";
import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { CodeQuality, CodeScanning } from "./analyses";
import {
exportedForTesting,
runQueries,
defaultSuites,
resolveQuerySuiteAlias,
@@ -131,204 +129,6 @@ test("status report fields", async (t) => {
});
});
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("checkout_path")
.returns("/checkout/path");
return exportedForTesting.getDiffRanges(
{
filename: "test.txt",
changes,
patch: patch?.join("\n"),
},
getRunnerLogger(true),
);
}
test("getDiffRanges: file unchanged", async (t) => {
const diffRanges = runGetDiffRanges(0, undefined);
t.deepEqual(diffRanges, []);
});
test("getDiffRanges: file diff too large", async (t) => {
const diffRanges = runGetDiffRanges(1000000, undefined);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 0,
endLine: 0,
},
]);
});
test("getDiffRanges: diff thunk with single addition range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
]);
});
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,8 +50,6 @@",
" a",
" b",
" c",
"-1",
"-2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, []);
});
test("getDiffRanges: diff thunk with single update range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
]);
});
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,9 @@",
" a",
" b",
" c",
"+1",
" c",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
{
path: "/checkout/path/test.txt",
startLine: 55,
endLine: 55,
},
]);
});
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
" d",
"-2",
"+3",
" e",
" f",
"+4",
"+5",
" g",
" h",
" i",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 54,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 57,
endLine: 58,
},
]);
});
test("getDiffRanges: multiple diff thunks", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
"@@ -130,6 +150,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 153,
endLine: 154,
},
]);
});
test("getDiffRanges: no diff context lines", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 50,
endLine: 51,
},
]);
});
test("getDiffRanges: malformed thunk header", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, undefined);
});
test("resolveQuerySuiteAlias", (t) => {
// default query suite names should resolve to something language-specific ending in `.qls`.
for (const suite of defaultSuites) {


@@ -6,13 +6,8 @@ import * as io from "@actions/io";
import * as del from "del";
import * as yaml from "js-yaml";
import {
getRequiredInput,
getTemporaryDirectory,
PullRequestBranches,
} from "./actions-util";
import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
import * as analyses from "./analyses";
import { getApiClient } from "./api-client";
import { setupCppAutobuild } from "./autobuild";
import { type CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
@@ -21,13 +16,13 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics";
import {
DiffThunkRange,
writeDiffRangesJsonFile,
getPullRequestEditedDiffRanges,
} from "./diff-informed-analysis-utils";
import { EnvVar } from "./environment";
import { FeatureEnablement, Feature } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay-database-utils";
import { getRepositoryNwoFromEnv } from "./repository";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
import * as util from "./util";
@@ -313,185 +308,6 @@ export async function setupDiffInformedQueryRun(
);
}
/**
* Return the file line ranges that were added or modified in the pull request.
*
* @param branches The base and head branches of the pull request.
 * @param logger The logger to use.
 * @returns An array of objects, where each object contains the absolute path
 * of a file and the start and end lines (both 1-based and inclusive) of an
 * added or modified range in that file. Returns `undefined` if the action was
 * not triggered by a pull request or if there was an error.
*/
async function getPullRequestEditedDiffRanges(
branches: PullRequestBranches,
logger: Logger,
): Promise<DiffThunkRange[] | undefined> {
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
if (fileDiffs === undefined) {
return undefined;
}
if (fileDiffs.length >= 300) {
// The "compare two commits" API returns a maximum of 300 changed files. If
// we see that many changed files, it is possible that there could be more,
// with the rest being truncated. In this case, we should not attempt to
// compute the diff ranges, as the result would be incomplete.
logger.warning(
`Cannot retrieve the full diff because there are too many ` +
`(${fileDiffs.length}) changed files in the pull request.`,
);
return undefined;
}
const results: DiffThunkRange[] = [];
for (const filediff of fileDiffs) {
const diffRanges = getDiffRanges(filediff, logger);
if (diffRanges === undefined) {
return undefined;
}
results.push(...diffRanges);
}
return results;
}
/**
* This interface is an abbreviated version of the file diff object returned by
* the GitHub API.
*/
interface FileDiff {
filename: string;
changes: number;
// A patch may be absent if the file is binary, if the file diff is too large,
// or if the file is unchanged.
patch?: string | undefined;
}
async function getFileDiffsWithBasehead(
branches: PullRequestBranches,
logger: Logger,
): Promise<FileDiff[] | undefined> {
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
// to GITHUB_REPOSITORY.
const repositoryNwo = getRepositoryNwoFromEnv(
"CODE_SCANNING_REPOSITORY",
"GITHUB_REPOSITORY",
);
const basehead = `${branches.base}...${branches.head}`;
try {
const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
basehead,
per_page: 1,
},
);
logger.debug(
`Response from compareCommitsWithBasehead(${basehead}):` +
`\n${JSON.stringify(response, null, 2)}`,
);
return response.data.files;
} catch (error: any) {
if (error.status) {
logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
logger.debug(
`Error running compareCommitsWithBasehead(${basehead}):` +
`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
);
return undefined;
} else {
throw error;
}
}
}
function getDiffRanges(
fileDiff: FileDiff,
logger: Logger,
): DiffThunkRange[] | undefined {
// Diff-informed queries expect the file path to be absolute. CodeQL always
// uses forward slashes as the path separator, so on Windows we need to
// replace any backslashes with forward slashes.
const filename = path
.join(getRequiredInput("checkout_path"), fileDiff.filename)
.replaceAll(path.sep, "/");
if (fileDiff.patch === undefined) {
if (fileDiff.changes === 0) {
// There are situations where a changed file legitimately has no diff.
// For example, the file may be a binary file, or the file may have been
// renamed with no changes to its contents. In these cases, the
// file would be reported as having 0 changes, and we can return an empty
// array to indicate no diff range in this file.
return [];
}
// If a file is reported to have nonzero changes but no patch, that may be
// due to the file diff being too large. In this case, we should fall back
// to a special diff range that covers the entire file.
return [
{
path: filename,
startLine: 0,
endLine: 0,
},
];
}
// The 1-based file line number of the current line
let currentLine = 0;
// The 1-based file line number that starts the current range of added lines
let additionRangeStartLine: number | undefined = undefined;
const diffRanges: DiffThunkRange[] = [];
const diffLines = fileDiff.patch.split("\n");
// Adding a fake context line at the end ensures that the following loop will
// always terminate the last range of added lines.
diffLines.push(" ");
for (const diffLine of diffLines) {
if (diffLine.startsWith("-")) {
// Ignore deletions completely -- we do not even want to consider them when
// calculating consecutive ranges of added lines.
continue;
}
if (diffLine.startsWith("+")) {
if (additionRangeStartLine === undefined) {
additionRangeStartLine = currentLine;
}
currentLine++;
continue;
}
if (additionRangeStartLine !== undefined) {
// Any line that does not start with a "+" or "-" terminates the current
// range of added lines.
diffRanges.push({
path: filename,
startLine: additionRangeStartLine,
endLine: currentLine - 1,
});
additionRangeStartLine = undefined;
}
if (diffLine.startsWith("@@ ")) {
// A new hunk header line resets the current line number.
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
if (match === null) {
logger.warning(
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
);
return undefined;
}
currentLine = parseInt(match[1], 10);
continue;
}
if (diffLine.startsWith(" ")) {
// An unchanged context line advances the current line number.
currentLine++;
continue;
}
}
return diffRanges;
}
/**
* Create an extension pack in the temporary directory that contains the file
* line ranges that were added or modified in the pull request.
@@ -922,7 +738,3 @@ export async function warnIfGoInstalledAfterInit(
}
}
}
export const exportedForTesting = {
getDiffRanges,
};


@@ -7,12 +7,12 @@ import { getActionVersion, getRequiredInput } from "./actions-util";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import {
asHTTPError,
ConfigurationError,
getRequiredEnvParam,
GITHUB_DOTCOM_URL,
GitHubVariant,
GitHubVersion,
isHTTPError,
parseGitHubUrl,
parseMatrixInput,
} from "./util";
@@ -280,22 +280,29 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
}
export function wrapApiConfigurationError(e: unknown) {
if (isHTTPError(e)) {
const httpError = asHTTPError(e);
if (httpError !== undefined) {
if (
e.message.includes("API rate limit exceeded for installation") ||
e.message.includes("commit not found") ||
e.message.includes("Resource not accessible by integration") ||
/ref .* not found in this repository/.test(e.message)
[
/API rate limit exceeded/,
/commit not found/,
/Resource not accessible by integration/,
/ref .* not found in this repository/,
].some((pattern) => pattern.test(httpError.message))
) {
return new ConfigurationError(e.message);
} else if (
e.message.includes("Bad credentials") ||
e.message.includes("Not Found")
return new ConfigurationError(httpError.message);
}
if (
httpError.message.includes("Bad credentials") ||
httpError.message.includes("Not Found")
) {
return new ConfigurationError(
"Please check that your token is valid and has the required permissions: contents: read, security-events: write",
);
}
if (httpError.status === 429) {
return new ConfigurationError("API rate limit exceeded");
}
}
return e;
}
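A sketch of how a caller might apply this wrapper so that known API failures surface as configuration errors rather than internal ones; `wrapApiConfigurationError` is assumed to be in scope from this module:

// Minimal usage sketch; any API call can be funnelled through the wrapper.
async function callApiWithWrappedErrors<T>(request: () => Promise<T>): Promise<T> {
  try {
    return await request();
  } catch (e) {
    // Rate limits, missing refs, bad credentials, and 429s come back as
    // ConfigurationError; anything else is rethrown unchanged.
    throw wrapApiConfigurationError(e);
  }
}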


@@ -310,6 +310,20 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
t.true(wrappedError instanceof ConfigurationError);
});
test("wrapCliConfigurationError - unknown query file", (t) => {
const commandError = new CommandInvocationError(
"codeql",
["database", "init"],
2,
"my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
);
const cliError = new CliError(commandError);
const wrappedError = wrapCliConfigurationError(cliError);
t.true(wrappedError instanceof ConfigurationError);
});
test("wrapCliConfigurationError - pack missing auth", (t) => {
const commandError = new CommandInvocationError(
"codeql",


@@ -264,6 +264,9 @@ export const cliErrorsConfig: Record<
new RegExp(
"Query pack .* cannot be found\\. Check the spelling of the pack\\.",
),
new RegExp(
"is not a .ql file, .qls file, a directory, or a query pack specification.",
),
],
},
[CliConfigErrorCategory.PackMissingAuth]: {


@@ -36,7 +36,6 @@ import {
createTestConfig,
} from "./testing-utils";
import { ToolsDownloadStatusReport } from "./tools-download";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import { initializeEnvironment } from "./util";
@@ -870,84 +869,6 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
});
});
const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
{
codeqlVersion: makeVersionInfo("2.15.0", {
[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
}),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},
flagPassed: false,
negativeFlagPassed: false,
},
{
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},
flagPassed: true,
negativeFlagPassed: false,
},
{
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.GHES,
version: "3.10.0",
},
flagPassed: true,
negativeFlagPassed: false,
},
];
for (const {
codeqlVersion,
flagPassed,
githubVersion,
negativeFlagPassed,
} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
test(`database interpret-results passes ${
flagPassed
? "--new-analysis-summary"
: negativeFlagPassed
? "--no-new-analysis-summary"
: "nothing"
} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
util.GitHubVariant[githubVersion.type]
} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await codeqlObject.databaseInterpretResults(
"",
[],
"",
"",
"",
"-v",
undefined,
"",
Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
createFeatures([]),
);
const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
actualArgs.includes("--new-analysis-summary"),
flagPassed,
`--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
);
t.is(
actualArgs.includes("--no-new-analysis-summary"),
negativeFlagPassed,
`--no-new-analysis-summary should${
negativeFlagPassed ? "" : "n't"
} be passed`,
);
});
}
test("runTool summarizes several fatal errors", async (t) => {
const heapError =
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";


@@ -3,7 +3,6 @@ import * as path from "path";
import * as core from "@actions/core";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import { RequestError } from "@octokit/request-error";
import * as yaml from "js-yaml";
import {
@@ -268,7 +267,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
*/
const CODEQL_MINIMUM_VERSION = "2.16.6";
const CODEQL_MINIMUM_VERSION = "2.17.6";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
@@ -371,11 +370,11 @@ export async function setupCodeQL(
toolsVersion,
zstdAvailability,
};
} catch (e) {
} catch (rawError) {
const e = api.wrapApiConfigurationError(rawError);
const ErrorClass =
e instanceof util.ConfigurationError ||
(e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
(e instanceof RequestError && e.status === 429) // rate limited
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
? util.ConfigurationError
: Error;
@@ -861,14 +860,6 @@ export async function getCodeQLForCmd(
} else {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (
!isSupportedToolsFeature(
await this.getVersion(),
ToolsFeature.AnalysisSummaryV2IsDefault,
)
) {
codeqlArgs.push("--new-analysis-summary");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);


@@ -148,6 +148,7 @@ test("load empty config", async (t) => {
});
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: languages,
repository: { owner: "github", repo: "example" },
@@ -187,6 +188,7 @@ test("load code quality config", async (t) => {
});
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
@@ -271,6 +273,7 @@ test("initActionState doesn't throw if there are queries configured in the repos
await t.notThrowsAsync(async () => {
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
@@ -309,6 +312,7 @@ test("loading a saved config produces the same config", async (t) => {
t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);
const config1 = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: "javascript,python",
tempDir,
@@ -360,6 +364,7 @@ test("loading config with version mismatch throws", async (t) => {
.returns("does-not-exist");
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: "javascript,python",
tempDir,
@@ -388,6 +393,7 @@ test("load input outside of workspace", async (t) => {
return await withTmpDir(async (tempDir) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: "../input",
tempDir,
@@ -415,6 +421,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile,
tempDir,
@@ -443,6 +450,7 @@ test("load non-existent input", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile,
@@ -526,6 +534,7 @@ test("load non-empty input", async (t) => {
const configFilePath = createConfigFile(inputFileContents, tempDir);
const actualConfig = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
buildModeInput: "none",
@@ -582,6 +591,7 @@ test("Using config input and file together, config input should be used.", async
const languagesInput = "javascript";
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile: configFilePath,
@@ -632,6 +642,7 @@ test("API client used when reading remote config", async (t) => {
const languagesInput = "javascript";
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile,
@@ -652,6 +663,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
@@ -680,6 +692,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
@@ -709,6 +722,7 @@ test("No detected languages", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
tempDir,
codeql,
@@ -731,6 +745,7 @@ test("Unknown languages", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
tempDir,


@@ -19,6 +19,7 @@ import {
calculateAugmentation,
ExcludeQueryFilter,
generateCodeScanningConfig,
parseUserConfig,
UserConfig,
} from "./config/db-config";
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
@@ -525,10 +526,12 @@ async function downloadCacheWithTime(
}
async function loadUserConfig(
logger: Logger,
configFile: string,
workspacePath: string,
apiDetails: api.GitHubApiCombinedDetails,
tempDir: string,
validateConfig: boolean,
): Promise<UserConfig> {
if (isLocal(configFile)) {
if (configFile !== userConfigFromActionPath(tempDir)) {
@@ -541,9 +544,14 @@ async function loadUserConfig(
);
}
}
return getLocalConfig(configFile);
return getLocalConfig(logger, configFile, validateConfig);
} else {
return await getRemoteConfig(configFile, apiDetails);
return await getRemoteConfig(
logger,
configFile,
apiDetails,
validateConfig,
);
}
}
@@ -779,7 +787,10 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
export async function initConfig(
features: FeatureEnablement,
inputs: InitConfigInputs,
): Promise<Config> {
const { logger, tempDir } = inputs;
// if configInput is set, it takes precedence over configFile
@@ -799,11 +810,14 @@ export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
logger.debug("No configuration file was provided");
} else {
logger.debug(`Using configuration file: ${inputs.configFile}`);
const validateConfig = await features.getValue(Feature.ValidateDbConfig);
userConfig = await loadUserConfig(
logger,
inputs.configFile,
inputs.workspacePath,
inputs.apiDetails,
tempDir,
validateConfig,
);
}
@@ -897,7 +911,11 @@ function isLocal(configPath: string): boolean {
return configPath.indexOf("@") === -1;
}
function getLocalConfig(configFile: string): UserConfig {
function getLocalConfig(
logger: Logger,
configFile: string,
validateConfig: boolean,
): UserConfig {
// Error if the file does not exist
if (!fs.existsSync(configFile)) {
throw new ConfigurationError(
@@ -905,12 +923,19 @@ function getLocalConfig(configFile: string): UserConfig {
);
}
return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
return parseUserConfig(
logger,
configFile,
fs.readFileSync(configFile, "utf-8"),
validateConfig,
);
}
async function getRemoteConfig(
logger: Logger,
configFile: string,
apiDetails: api.GitHubApiCombinedDetails,
validateConfig: boolean,
): Promise<UserConfig> {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp(
@@ -918,7 +943,7 @@ async function getRemoteConfig(
);
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
if (pieces?.groups === undefined || pieces.length < 5) {
throw new ConfigurationError(
errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
);
@@ -946,9 +971,12 @@ async function getRemoteConfig(
);
}
return yaml.load(
return parseUserConfig(
logger,
configFile,
Buffer.from(fileContents, "base64").toString("binary"),
) as UserConfig;
validateConfig,
);
}
/**


@@ -2,7 +2,13 @@ import test, { ExecutionContext } from "ava";
import { RepositoryProperties } from "../feature-flags/properties";
import { KnownLanguage, Language } from "../languages";
import { prettyPrintPack } from "../util";
import { getRunnerLogger } from "../logging";
import {
checkExpectedLogMessages,
getRecordingLogger,
LoggedMessage,
} from "../testing-utils";
import { ConfigurationError, prettyPrintPack } from "../util";
import * as dbConfig from "./db-config";
@@ -391,3 +397,111 @@ test(
{},
/"a-pack-without-a-scope" is not a valid pack/,
);
test("parseUserConfig - successfully parses valid YAML", (t) => {
const result = dbConfig.parseUserConfig(
getRunnerLogger(true),
"test",
`
paths-ignore:
- "some/path"
queries:
- uses: foo
some-unknown-option: true
`,
true,
);
t.truthy(result);
if (t.truthy(result["paths-ignore"])) {
t.is(result["paths-ignore"].length, 1);
t.is(result["paths-ignore"][0], "some/path");
}
if (t.truthy(result["queries"])) {
t.is(result["queries"].length, 1);
t.deepEqual(result["queries"][0], { uses: "foo" });
}
});
test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
t.throws(
() =>
dbConfig.parseUserConfig(
getRunnerLogger(true),
"test",
`
paths-ignore:
- "some/path"
queries:
- foo
`,
true,
),
{
instanceOf: ConfigurationError,
},
);
});
test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.notThrows(() =>
dbConfig.parseUserConfig(
logger,
"test",
`
query-filters:
- something
- include: foo
- exclude: bar
`,
true,
),
);
});
test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.throws(
() =>
dbConfig.parseUserConfig(
logger,
"test",
`
paths-ignore:
- "some/path"
queries: true
`,
true,
),
{
instanceOf: ConfigurationError,
message:
'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
},
);
const expectedMessages = ["instance.queries is not of a type(s) array"];
checkExpectedLogMessages(t, loggedMessages, expectedMessages);
});
test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.notThrows(() =>
dbConfig.parseUserConfig(
logger,
"test",
`
paths-ignore:
- "some/path"
queries: true
`,
false,
),
);
});


@@ -1,5 +1,7 @@
import * as path from "path";
import * as yaml from "js-yaml";
import * as jsonschema from "jsonschema";
import * as semver from "semver";
import * as errorMessages from "../error-messages";
@@ -378,10 +380,7 @@ function combineQueries(
const result: QuerySpec[] = [];
// Query settings obtained from the repository properties have the highest precedence.
if (
augmentationProperties.repoPropertyQueries &&
augmentationProperties.repoPropertyQueries.input
) {
if (augmentationProperties.repoPropertyQueries?.input) {
logger.info(
`Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
`${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,
@@ -474,3 +473,53 @@ export function generateCodeScanningConfig(
return augmentedConfig;
}
/**
* Attempts to parse `contents` into a `UserConfig` value.
*
* @param logger The logger to use.
 * @param pathInput The path to the file from which `contents` was obtained, for use in error messages.
* @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
* @param validateConfig Whether to validate the configuration file against the schema.
* @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
 * @throws A `ConfigurationError` if parsing or validation fails.
*/
export function parseUserConfig(
logger: Logger,
pathInput: string,
contents: string,
validateConfig: boolean,
): UserConfig {
try {
const schema =
// eslint-disable-next-line @typescript-eslint/no-require-imports
require("../../src/db-config-schema.json") as jsonschema.Schema;
const doc = yaml.load(contents);
if (validateConfig) {
const result = new jsonschema.Validator().validate(doc, schema);
if (result.errors.length > 0) {
for (const error of result.errors) {
logger.error(error.stack);
}
throw new ConfigurationError(
errorMessages.getInvalidConfigFileMessage(
pathInput,
result.errors.map((e) => e.stack),
),
);
}
}
return doc as UserConfig;
} catch (error) {
if (error instanceof yaml.YAMLException) {
throw new ConfigurationError(
errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
);
}
throw error;
}
}
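A sketch of calling the new parser directly; the path is used only in error messages, and the YAML here is an arbitrary example:

const userConfig = parseUserConfig(
  logger, // any Logger implementation from ./logging
  ".github/codeql/codeql-config.yml",
  ["disable-default-queries: false", "queries:", "  - uses: security-extended"].join(
    "\n",
  ),
  true, // validate against db-config-schema.json
);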


@@ -5,6 +5,7 @@ import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import { GitHubApiDetails } from "./api-client";
import * as apiClient from "./api-client";
import { createStubCodeQL } from "./codeql";
@@ -108,6 +109,39 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
});
});
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
const loggedMessages = [];
await uploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
});
test("Abort database upload if running against GHES", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);


@@ -1,6 +1,7 @@
import * as fs from "fs";
import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import { getApiClient, GitHubApiDetails } from "./api-client";
import { type CodeQL } from "./codeql";
import { Config } from "./config-utils";
@@ -22,6 +23,13 @@ export async function uploadDatabases(
return;
}
if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
logger.debug(
`Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
);
return;
}
if (util.isInTestMode()) {
logger.debug("In test mode. Skipping database upload.");
return;

src/db-config-schema.json (new file, 145 lines)

@@ -0,0 +1,145 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "CodeQL Database Configuration",
"description": "Format of the config file supplied by the user for CodeQL analysis",
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Name of the configuration"
},
"disable-default-queries": {
"type": "boolean",
"description": "Whether to disable default queries"
},
"queries": {
"type": "array",
"description": "List of additional queries to run",
"items": {
"$ref": "#/definitions/QuerySpec"
}
},
"paths-ignore": {
"type": "array",
"description": "Paths to ignore during analysis",
"items": {
"type": "string"
}
},
"paths": {
"type": "array",
"description": "Paths to include in analysis",
"items": {
"type": "string"
}
},
"packs": {
"description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "string"
}
}
}
]
},
"query-filters": {
"type": "array",
"description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
"items": {
"$ref": "#/definitions/QueryFilter"
}
}
},
"additionalProperties": true,
"definitions": {
"QuerySpec": {
"type": "object",
"description": "Detailed query specification object",
"properties": {
"name": {
"type": "string",
"description": "Optional name for the query"
},
"uses": {
"type": "string",
"description": "The query or query suite to use"
}
},
"required": ["uses"],
"additionalProperties": false
},
"QueryFilter": {
"description": "Query filter that can either include or exclude queries",
"oneOf": [
{
"$ref": "#/definitions/ExcludeQueryFilter"
},
{
"$ref": "#/definitions/IncludeQueryFilter"
},
{}
]
},
"ExcludeQueryFilter": {
"type": "object",
"description": "Filter to exclude queries",
"properties": {
"exclude": {
"type": "object",
"description": "Queries to exclude",
"additionalProperties": {
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "string"
}
]
}
}
},
"required": ["exclude"],
"additionalProperties": false
},
"IncludeQueryFilter": {
"type": "object",
"description": "Filter to include queries",
"properties": {
"include": {
"type": "object",
"description": "Queries to include",
"additionalProperties": {
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "string"
}
]
}
}
},
"required": ["include"],
"additionalProperties": false
}
}
}
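To illustrate what the schema accepts, a minimal sketch that validates a candidate config object directly with `jsonschema`, mirroring the check in `parseUserConfig` (the require path assumes this repository layout):

import * as jsonschema from "jsonschema";

// eslint-disable-next-line @typescript-eslint/no-require-imports
const schema = require("../../src/db-config-schema.json") as jsonschema.Schema;

const candidate = {
  name: "example",
  "disable-default-queries": false,
  queries: [{ uses: "security-extended" }],
  "paths-ignore": ["dist/**"],
};

const result = new jsonschema.Validator().validate(candidate, schema);
// An empty `errors` array means the config conforms to the schema.
console.log(result.errors.map((e) => e.stack));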


@@ -4,7 +4,10 @@ import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import type { PullRequestBranches } from "./actions-util";
import * as apiClient from "./api-client";
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
import {
shouldPerformDiffInformedAnalysis,
exportedForTesting,
} from "./diff-informed-analysis-utils";
import { Feature, Features } from "./feature-flags";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
@@ -183,3 +186,201 @@ test(
},
false,
);
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("checkout_path")
.returns("/checkout/path");
return exportedForTesting.getDiffRanges(
{
filename: "test.txt",
changes,
patch: patch?.join("\n"),
},
getRunnerLogger(true),
);
}
test("getDiffRanges: file unchanged", async (t) => {
const diffRanges = runGetDiffRanges(0, undefined);
t.deepEqual(diffRanges, []);
});
test("getDiffRanges: file diff too large", async (t) => {
const diffRanges = runGetDiffRanges(1000000, undefined);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 0,
endLine: 0,
},
]);
});
test("getDiffRanges: diff thunk with single addition range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
]);
});
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,8 +50,6 @@",
" a",
" b",
" c",
"-1",
"-2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, []);
});
test("getDiffRanges: diff thunk with single update range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
]);
});
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,9 @@",
" a",
" b",
" c",
"+1",
" c",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
{
path: "/checkout/path/test.txt",
startLine: 55,
endLine: 55,
},
]);
});
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
" d",
"-2",
"+3",
" e",
" f",
"+4",
"+5",
" g",
" h",
" i",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 54,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 57,
endLine: 58,
},
]);
});
test("getDiffRanges: multiple diff thunks", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
"@@ -130,6 +150,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 153,
endLine: 154,
},
]);
});
test("getDiffRanges: no diff context lines", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 50,
endLine: 51,
},
]);
});
test("getDiffRanges: malformed thunk header", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, undefined);
});


@@ -3,12 +3,25 @@ import * as path from "path";
import * as actionsUtil from "./actions-util";
import type { PullRequestBranches } from "./actions-util";
import { getGitHubVersion } from "./api-client";
import { getApiClient, getGitHubVersion } from "./api-client";
import type { CodeQL } from "./codeql";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { getRepositoryNwoFromEnv } from "./repository";
import { GitHubVariant, satisfiesGHESVersion } from "./util";
/**
* This interface is an abbreviated version of the file diff object returned by
* the GitHub API.
*/
interface FileDiff {
filename: string;
changes: number;
// A patch may be absent if the file is binary, if the file diff is too large,
// or if the file is unchanged.
patch?: string | undefined;
}
/**
* Check if the action should perform diff-informed analysis.
*/
@@ -93,3 +106,174 @@ export function readDiffRangesJsonFile(
);
return JSON.parse(jsonContents) as DiffThunkRange[];
}
/**
* Return the file line ranges that were added or modified in the pull request.
*
* @param branches The base and head branches of the pull request.
 * @param logger The logger to use.
 * @returns An array of objects, where each object contains the absolute path
 * of a file and the start and end lines (both 1-based and inclusive) of an
 * added or modified range in that file. Returns `undefined` if the action was
 * not triggered by a pull request or if there was an error.
*/
export async function getPullRequestEditedDiffRanges(
branches: PullRequestBranches,
logger: Logger,
): Promise<DiffThunkRange[] | undefined> {
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
if (fileDiffs === undefined) {
return undefined;
}
if (fileDiffs.length >= 300) {
// The "compare two commits" API returns a maximum of 300 changed files. If
// we see that many changed files, it is possible that there could be more,
// with the rest being truncated. In this case, we should not attempt to
// compute the diff ranges, as the result would be incomplete.
logger.warning(
`Cannot retrieve the full diff because there are too many ` +
`(${fileDiffs.length}) changed files in the pull request.`,
);
return undefined;
}
const results: DiffThunkRange[] = [];
for (const filediff of fileDiffs) {
const diffRanges = getDiffRanges(filediff, logger);
if (diffRanges === undefined) {
return undefined;
}
results.push(...diffRanges);
}
return results;
}
async function getFileDiffsWithBasehead(
branches: PullRequestBranches,
logger: Logger,
): Promise<FileDiff[] | undefined> {
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
// to GITHUB_REPOSITORY.
const repositoryNwo = getRepositoryNwoFromEnv(
"CODE_SCANNING_REPOSITORY",
"GITHUB_REPOSITORY",
);
const basehead = `${branches.base}...${branches.head}`;
try {
const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
basehead,
per_page: 1,
},
);
logger.debug(
`Response from compareCommitsWithBasehead(${basehead}):` +
`\n${JSON.stringify(response, null, 2)}`,
);
return response.data.files;
} catch (error: any) {
if (error.status) {
logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
logger.debug(
`Error running compareCommitsWithBasehead(${basehead}):` +
`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
);
return undefined;
} else {
throw error;
}
}
}
function getDiffRanges(
fileDiff: FileDiff,
logger: Logger,
): DiffThunkRange[] | undefined {
// Diff-informed queries expect the file path to be absolute. CodeQL always
// uses forward slashes as the path separator, so on Windows we need to
// replace any backslashes with forward slashes.
const filename = path
.join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
.replaceAll(path.sep, "/");
if (fileDiff.patch === undefined) {
if (fileDiff.changes === 0) {
// There are situations where a changed file legitimately has no diff.
// For example, the file may be a binary file, or the file may have been
// renamed with no changes to its contents. In these cases, the
// file would be reported as having 0 changes, and we can return an empty
// array to indicate no diff range in this file.
return [];
}
// If a file is reported to have nonzero changes but no patch, that may be
// due to the file diff being too large. In this case, we should fall back
// to a special diff range that covers the entire file.
return [
{
path: filename,
startLine: 0,
endLine: 0,
},
];
}
// The 1-based file line number of the current line
let currentLine = 0;
// The 1-based file line number that starts the current range of added lines
let additionRangeStartLine: number | undefined = undefined;
const diffRanges: DiffThunkRange[] = [];
const diffLines = fileDiff.patch.split("\n");
// Adding a fake context line at the end ensures that the following loop will
// always terminate the last range of added lines.
diffLines.push(" ");
for (const diffLine of diffLines) {
if (diffLine.startsWith("-")) {
// Ignore deletions completely -- we do not even want to consider them when
// calculating consecutive ranges of added lines.
continue;
}
if (diffLine.startsWith("+")) {
if (additionRangeStartLine === undefined) {
additionRangeStartLine = currentLine;
}
currentLine++;
continue;
}
if (additionRangeStartLine !== undefined) {
// Any line that does not start with a "+" or "-" terminates the current
// range of added lines.
diffRanges.push({
path: filename,
startLine: additionRangeStartLine,
endLine: currentLine - 1,
});
additionRangeStartLine = undefined;
}
if (diffLine.startsWith("@@ ")) {
// A new hunk header line resets the current line number.
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
if (match === null) {
logger.warning(
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
);
return undefined;
}
currentLine = parseInt(match[1], 10);
continue;
}
if (diffLine.startsWith(" ")) {
// An unchanged context line advances the current line number.
currentLine++;
continue;
}
}
return diffRanges;
}
export const exportedForTesting = {
getDiffRanges,
};
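As a worked example of the line arithmetic above, consider the hunk used throughout the tests in this diff:

// Trace of getDiffRanges for the patch:
//   "@@ -30,6 +50,8 @@"  -> currentLine = 50
//   " a"                 -> context, currentLine = 51
//   " b"                 -> context, currentLine = 52
//   " c"                 -> context, currentLine = 53
//   "+1"                 -> additionRangeStartLine = 53, currentLine = 54
//   "+2"                 -> currentLine = 55
//   " d"                 -> closes the range: { startLine: 53, endLine: 54 }
// which matches the expected range 53-54 in the tests above.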


@@ -14,6 +14,22 @@ export function getConfigFileDoesNotExistErrorMessage(
return `The configuration file "${configFile}" does not exist`;
}
export function getConfigFileParseErrorMessage(
configFile: string,
message: string,
): string {
return `Cannot parse "${configFile}": ${message}`;
}
export function getInvalidConfigFileMessage(
configFile: string,
messages: string[],
): string {
const andMore =
messages.length > 10 ? `, and ${messages.length - 10} more.` : ".";
return `The configuration file "${configFile}" is invalid: ${messages.slice(0, 10).join(", ")}${andMore}`;
}
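For example, with twelve validation messages only the first ten are joined inline:

getInvalidConfigFileMessage(
  "cfg.yml",
  Array.from({ length: 12 }, (_, i) => `m${i + 1}`),
);
// => 'The configuration file "cfg.yml" is invalid: m1, m2, ..., m10, and 2 more.'
// (the "..." abbreviates m3 through m9 here; the real output lists all ten)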
export function getConfigFileRepoFormatInvalidMessage(
configFile: string,
): string {


@@ -44,6 +44,7 @@ export interface FeatureEnablement {
*/
export enum Feature {
AllowToolcacheInput = "allow_toolcache_input",
AnalyzeUseNewUpload = "analyze_use_new_upload",
CleanupTrapCaches = "cleanup_trap_caches",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
DiffInformedQueries = "diff_informed_queries",
@@ -77,6 +78,7 @@ export enum Feature {
QaTelemetryEnabled = "qa_telemetry_enabled",
ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
UseRepositoryProperties = "use_repository_properties",
ValidateDbConfig = "validate_db_config",
}
export const featureConfig: Record<
@@ -115,6 +117,11 @@ export const featureConfig: Record<
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: undefined,
},
[Feature.AnalyzeUseNewUpload]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
minimumVersion: undefined,
},
[Feature.CleanupTrapCaches]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -287,6 +294,11 @@ export const featureConfig: Record<
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0",
},
[Feature.ValidateDbConfig]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
minimumVersion: undefined,
},
};
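Each entry pairs a flag with an environment variable, so a flag such as `ValidateDbConfig` can be forced on for a single run without waiting for the remote default to change. A hedged sketch of the override convention (the actual precedence logic lives elsewhere in this module):

// Hypothetical illustration only: setting the env var listed in featureConfig
// (e.g. CODEQL_ACTION_VALIDATE_DB_CONFIG=true) forces the feature on.
function isFeatureForcedOn(envVar: string): boolean {
  return process.env[envVar]?.toLowerCase() === "true";
}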
/**
@@ -641,7 +653,7 @@ class GitHubFeatureFlags {
}
this.logger.debug(
"Loaded the following default values for the feature flags from the Code Scanning API:",
"Loaded the following default values for the feature flags from the CodeQL Action API:",
);
for (const [feature, value] of Object.entries(remoteFlags).sort(
([nameA], [nameB]) => nameA.localeCompare(nameB),
@@ -651,12 +663,13 @@ class GitHubFeatureFlags {
this.hasAccessedRemoteFeatureFlags = true;
return remoteFlags;
} catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
const httpError = util.asHTTPError(e);
if (httpError?.status === 403) {
this.logger.warning(
"This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`,
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
);
this.hasAccessedRemoteFeatureFlags = false;
return {};


@@ -325,7 +325,7 @@ async function run() {
}
analysisKinds = await getAnalysisKinds(logger);
config = await initConfig({
config = await initConfig(features, {
analysisKinds,
languagesInput: getOptionalInput("languages"),
queriesInput: getOptionalInput("queries"),


@@ -61,10 +61,11 @@ export async function initCodeQL(
}
export async function initConfig(
features: FeatureEnablement,
inputs: configUtils.InitConfigInputs,
): Promise<configUtils.Config> {
return await withGroupAsync("Load language configuration", async () => {
return await configUtils.initConfig(inputs);
return await configUtils.initConfig(features, inputs);
});
}


@@ -11,6 +11,8 @@ import * as gitUtils from "./git-utils";
import { getRunnerLogger } from "./logging";
import {
downloadOverlayBaseDatabaseFromCache,
getCacheRestoreKeyPrefix,
getCacheSaveKey,
OverlayDatabaseMode,
writeBaseDatabaseOidsFile,
writeOverlayChangesFile,
@@ -261,3 +263,48 @@ test(
},
false,
);
test("overlay-base database cache keys remain stable", async (t) => {
const logger = getRunnerLogger(true);
const config = createTestConfig({ languages: ["python", "javascript"] });
const codeQlVersion = "2.23.0";
const commitOid = "abc123def456";
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
sinon.stub(gitUtils, "getCommitOid").resolves(commitOid);
sinon.stub(actionsUtil, "getWorkflowRunID").returns(12345);
sinon.stub(actionsUtil, "getWorkflowRunAttempt").returns(1);
const saveKey = await getCacheSaveKey(
config,
codeQlVersion,
"checkout-path",
logger,
);
const expectedSaveKey =
"codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1";
t.is(
saveKey,
expectedSaveKey,
"Cache save key changed unexpectedly. " +
"This may indicate breaking changes in the cache key generation logic.",
);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion,
);
const expectedRestoreKeyPrefix =
"codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
t.is(
restoreKeyPrefix,
expectedRestoreKeyPrefix,
"Cache restore key prefix changed unexpectedly. " +
"This may indicate breaking changes in the cache key generation logic.",
);
t.true(
saveKey.startsWith(restoreKeyPrefix),
`Expected save key "${saveKey}" to start with restore key prefix "${restoreKeyPrefix}"`,
);
});


@@ -4,13 +4,19 @@ import * as path from "path";
import * as actionsCache from "@actions/cache";
import { getRequiredInput, getTemporaryDirectory } from "./actions-util";
import {
getRequiredInput,
getTemporaryDirectory,
getWorkflowRunAttempt,
getWorkflowRunID,
} from "./actions-util";
import { getAutomationID } from "./api-client";
import { type CodeQL } from "./codeql";
import { type Config } from "./config-utils";
import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import {
getErrorMessage,
isInTestMode,
tryGetFolderBytes,
waitForResultWithTimeLimit,
@@ -34,15 +40,10 @@ export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
* Actions Cache client library. Instead we place a limit on the uncompressed
* size of the overlay-base database.
*
* Assuming 2.5:1 compression ratio, the 15 GB limit on uncompressed data would
* translate to a limit of around 6 GB after compression. This is a high limit
* compared to the default 10GB Actions Cache capacity, but enforcement of Actions
* Cache quotas is not immediate.
*
* TODO: revisit this limit before removing the restriction for overlay analysis
* to the `github` and `dsp-testing` orgs.
* Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
* translate to a limit of around 3 GB after compression.
*/
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15000;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;
@@ -271,6 +272,7 @@ export async function uploadOverlayBaseDatabaseToCache(
config,
codeQlVersion,
checkoutPath,
logger,
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
@@ -448,17 +450,28 @@ export async function downloadOverlayBaseDatabaseFromCache(
* The key consists of the restore key prefix (which does not include the
* commit SHA) and the commit SHA of the current checkout.
*/
async function getCacheSaveKey(
export async function getCacheSaveKey(
config: Config,
codeQlVersion: string,
checkoutPath: string,
logger: Logger,
): Promise<string> {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`,
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion,
);
return `${restoreKeyPrefix}${sha}`;
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
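Putting the pieces together, a save key decomposes as in the stability test earlier in this diff; the breakdown below is an annotation, with the middle hash component assumed to be derived from the automation ID:

// codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1
// |_______ restore key prefix (name, key version, automation-ID hash,
//          sorted languages, CodeQL version) _______| commit SHA | run ID | attempt
// Run ID and attempt both fall back to 1 if they cannot be determined.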
/**
@@ -475,7 +488,7 @@ async function getCacheSaveKey(
* not include the commit SHA. This allows us to restore the most recent
* compatible overlay-base database.
*/
async function getCacheRestoreKeyPrefix(
export async function getCacheRestoreKeyPrefix(
config: Config,
codeQlVersion: string,
): Promise<string> {


@@ -168,7 +168,7 @@ export function tryGetTagNameFromUrl(
// assumes less about the structure of the URL.
const match = matches[matches.length - 1];
if (match === null || match.length !== 2) {
if (match?.length !== 2) {
logger.debug(
`Could not determine tag name for URL ${url}. Matched ${JSON.stringify(
match,

View File

@@ -30,7 +30,7 @@ async function runWrapper() {
logger,
);
if ((config && config.debugMode) || core.isDebug()) {
if (config?.debugMode || core.isDebug()) {
const logFilePath = core.getState("proxy-log-file");
logger.info(
"Debug mode is on. Uploading proxy log as Actions debugging artifact...",

View File

@@ -23,7 +23,6 @@ import { getRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
ConfigurationError,
isHTTPError,
getRequiredEnvParam,
getCachedCodeQlVersion,
isInTestMode,
@@ -33,6 +32,7 @@ import {
BuildMode,
getErrorMessage,
getTestingEnvironment,
asHTTPError,
} from "./util";
export enum ActionName {
@@ -387,9 +387,9 @@ export async function createStatusReportBase(
}
const OUT_OF_DATE_MSG =
"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
"CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`.";
const INCOMPATIBLE_MSG =
"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
"CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`.";
/**
* Send a status report to the code_scanning/analysis/status endpoint.
@@ -429,8 +429,9 @@ export async function sendStatusReport<S extends StatusReportBase>(
},
);
} catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
const httpError = asHTTPError(e);
if (httpError !== undefined) {
switch (httpError.status) {
case 403:
if (
getWorkflowEventName() === "push" &&
@@ -438,16 +439,20 @@ export async function sendStatusReport<S extends StatusReportBase>(
) {
core.warning(
'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
"Uploading Code Scanning results requires write access. " +
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
"Uploading CodeQL results requires write access. " +
'To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
`See ${DocUrl.SCANNING_ON_PUSH} for more information on how to configure these events.`,
);
} else {
core.warning(e.message);
core.warning(
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
);
}
return;
case 404:
core.warning(e.message);
core.warning(httpError.message);
return;
case 422:
// schema incompatibility when reporting status
@@ -465,7 +470,7 @@ export async function sendStatusReport<S extends StatusReportBase>(
// something else has gone wrong and the request/response will be logged by octokit
// it's possible this is a transient error and we should continue scanning
core.warning(
`An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
`An unexpected error occurred when sending a status report: ${getErrorMessage(
e,
)}`,
);

View File

@@ -35,14 +35,14 @@ async function getTarVersion(): Promise<TarVersion> {
// Return whether this is GNU tar or BSD tar, and the version number
if (stdout.includes("GNU tar")) {
const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "gnu", version: match[1] };
} else if (stdout.includes("bsdtar")) {
const match = stdout.match(/bsdtar ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}

View File

@@ -2,7 +2,7 @@ import { TextDecoder } from "node:util";
import path from "path";
import * as github from "@actions/github";
import { TestFn } from "ava";
import { ExecutionContext, TestFn } from "ava";
import nock from "nock";
import * as sinon from "sinon";
@@ -180,6 +180,23 @@ export function getRecordingLogger(messages: LoggedMessage[]): Logger {
};
}
export function checkExpectedLogMessages(
t: ExecutionContext<any>,
messages: LoggedMessage[],
expectedMessages: string[],
) {
for (const expectedMessage of expectedMessages) {
t.assert(
messages.some(
(msg) =>
typeof msg.message === "string" &&
msg.message.includes(expectedMessage),
),
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
);
}
}
/** Mock the HTTP request to the feature flags enablement API endpoint. */
export function mockFeatureFlagApiEndpoint(
responseStatusCode: number,

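A hypothetical usage of the new `checkExpectedLogMessages` helper together with the existing `getRecordingLogger`, from inside an ava test body (`codeUnderTest` is a stand-in for whatever is being exercised):

const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
await codeUnderTest(logger); // hypothetical code under test that logs
checkExpectedLogMessages(t, messages, [
"Failed to get workflow run ID or attempt ID.",
]);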
View File

@@ -3,13 +3,11 @@ import * as semver from "semver";
import type { VersionInfo } from "./codeql";
export enum ToolsFeature {
AnalysisSummaryV2IsDefault = "analysisSummaryV2Default",
BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
ForceOverwrite = "forceOverwrite",
IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",
PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib",
SarifMergeRunsFromEqualCategory = "sarifMergeRunsFromEqualCategory",
}
/**

View File

@@ -13,8 +13,8 @@ import * as gitUtils from "./git-utils";
import { Language } from "./languages";
import { Logger } from "./logging";
import {
asHTTPError,
getErrorMessage,
isHTTPError,
tryGetFolderBytes,
waitForResultWithTimeLimit,
} from "./util";
@@ -236,7 +236,7 @@ export async function cleanupTrapCaches(
}
return { trap_cache_cleanup_size_bytes: totalBytesCleanedUp };
} catch (e) {
if (isHTTPError(e) && e.status === 403) {
if (asHTTPError(e)?.status === 403) {
logger.warning(
"Could not cleanup TRAP caches as the token did not have the required permissions. " +
'To clean up TRAP caches, ensure the token has the "actions:write" permission. ' +

View File

@@ -21,7 +21,6 @@ import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import {
ConfigurationError,
@@ -269,32 +268,6 @@ async function combineSarifFilesUsingCLI(
codeQL = initCodeQLResult.codeql;
}
if (
!(await codeQL.supportsFeature(
ToolsFeature.SarifMergeRunsFromEqualCategory,
))
) {
await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
logger.warning(
"The CodeQL CLI does not support merging SARIF files. Merging files in the action.",
);
if (
await shouldShowCombineSarifFilesDeprecationWarning(
sarifObjects,
gitHubVersion,
)
) {
logger.warning(
`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`,
);
core.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
}
return combineSarifFiles(sarifFiles, logger);
}
const baseTempDir = path.resolve(tempDir, "combined-sarif");
fs.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs.mkdtempSync(path.resolve(baseTempDir, "output-"));
@@ -386,16 +359,17 @@ export async function uploadPayload(
logger.info("Successfully uploaded results");
return response.data.id as string;
} catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
const httpError = util.asHTTPError(e);
if (httpError !== undefined) {
switch (httpError.status) {
case 403:
core.warning(e.message || GENERIC_403_MSG);
core.warning(httpError.message || GENERIC_403_MSG);
break;
case 404:
core.warning(e.message || GENERIC_404_MSG);
core.warning(httpError.message || GENERIC_404_MSG);
break;
default:
core.warning(e.message);
core.warning(httpError.message);
break;
}
}
@@ -687,51 +661,39 @@ export function buildPayload(
return payloadObj;
}
/**
* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
* to.
*/
export async function uploadFiles(
inputSarifPath: string,
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate,
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget,
);
export interface PostProcessingResults {
sarif: util.SarifFile;
analysisKey: string;
environment: string;
}
/**
* Uploads the given array of SARIF files.
* Performs post-processing of the SARIF files given by `sarifPaths`.
*
* @param logger The logger to use.
* @param features Information about enabled features.
* @param checkoutPath The path where the repository was checked out.
* @param sarifPaths The paths of the SARIF files to post-process.
* @param category The analysis category.
* @param analysis The analysis configuration.
*
* @returns The results of post-processing the SARIF files,
* including the resulting SARIF file.
*/
export async function uploadSpecifiedFiles(
sarifPaths: string[],
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
export async function postProcessSarifFiles(
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
features: FeatureEnablement,
checkoutPath: string,
sarifPaths: string[],
category: string | undefined,
analysis: analyses.AnalysisConfig,
): Promise<PostProcessingResults> {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif: SarifFile;
category = uploadTarget.fixCategory(logger, category);
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
// Validate that the files we were asked to upload are all valid SARIF files
@@ -767,6 +729,113 @@ export async function uploadSpecifiedFiles(
environment,
);
return { sarif, analysisKey, environment };
}
/**
* Writes the post-processed SARIF file to disk, if needed, based on `pathInput` or the `SARIF_DUMP_DIR` environment variable.
*
* @param logger The logger to use.
* @param pathInput The input provided for `post-processed-sarif-path`.
* @param uploadTarget The upload target.
* @param processingResults The results of post-processing SARIF files.
*/
export async function writePostProcessedFiles(
logger: Logger,
pathInput: string | undefined,
uploadTarget: analyses.AnalysisConfig,
postProcessingResults: PostProcessingResults,
) {
// If there's an explicit input, use that. Otherwise, use the value from the environment variable.
const outputPath = pathInput || util.getOptionalEnvVar(EnvVar.SARIF_DUMP_DIR);
// If we have a non-empty output path, write the SARIF file to it.
if (outputPath !== undefined) {
dumpSarifFile(
JSON.stringify(postProcessingResults.sarif),
outputPath,
logger,
uploadTarget,
);
} else {
logger.debug(`Not writing post-processed SARIF files.`);
}
}
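// A minimal standalone sketch (not the action's code) of the output-path
// resolution implemented above: an explicit input wins; an empty or
// missing input falls back to the SARIF_DUMP_DIR environment variable;
// if both are empty or unset, nothing is written.
function resolveOutputPathSketch(
pathInput: string | undefined,
envValue: string | undefined, // value of SARIF_DUMP_DIR, if any
): string | undefined {
// `||` rather than `??` so that an empty-string input falls through to
// the environment variable, mirroring getOptionalEnvVar's treatment of
// empty values as unset.
return pathInput || (envValue?.trim() ? envValue : undefined);
}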
/**
* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
* to.
*/
export async function uploadFiles(
inputSarifPath: string,
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate,
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget,
);
}
/**
* Uploads the given array of SARIF files.
*/
async function uploadSpecifiedFiles(
sarifPaths: string[],
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const processingResults: PostProcessingResults = await postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifPaths,
category,
uploadTarget,
);
return uploadPostProcessedFiles(
logger,
checkoutPath,
uploadTarget,
processingResults,
);
}
/**
* Uploads the results of post-processing SARIF files to the specified upload target.
*
* @param logger The logger to use.
* @param checkoutPath The path at which the repository was checked out.
* @param uploadTarget The analysis configuration.
* @param postProcessingResults The results of post-processing SARIF files.
*
* @returns The results of uploading the `postProcessingResults` to `uploadTarget`.
*/
export async function uploadPostProcessedFiles(
logger: Logger,
checkoutPath: string,
uploadTarget: analyses.AnalysisConfig,
postProcessingResults: PostProcessingResults,
): Promise<UploadResult> {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = util.getToolNames(sarif);
logger.debug(`Validating that each SARIF run has a unique category`);
@@ -774,11 +843,6 @@ export async function uploadSpecifiedFiles(
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
if (dumpDir) {
dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
}
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -786,13 +850,13 @@ export async function uploadSpecifiedFiles(
const payload = buildPayload(
await gitUtils.getCommitOid(checkoutPath),
await gitUtils.getRef(),
analysisKey,
postProcessingResults.analysisKey,
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
actionsUtil.getWorkflowRunID(),
actionsUtil.getWorkflowRunAttempt(),
checkoutURI,
environment,
postProcessingResults.environment,
toolNames,
await gitUtils.determineBaseBranchHeadCommitOid(),
);
@@ -838,14 +902,14 @@ function dumpSarifFile(
fs.mkdirSync(outputDir, { recursive: true });
} else if (!fs.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path specified by the ${EnvVar.SARIF_DUMP_DIR} environment variable exists and is not a directory: ${outputDir}`,
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`,
);
}
const outputFile = path.resolve(
outputDir,
`upload${uploadTarget.sarifExtension}`,
);
logger.info(`Dumping processed SARIF file to ${outputFile}`);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs.writeFileSync(outputFile, sarifPayload);
}

View File

@@ -16,7 +16,7 @@ import {
isThirdPartyAnalysis,
} from "./status-report";
import * as upload_lib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import {
ConfigurationError,
checkActionVersion,
@@ -90,9 +90,10 @@ async function run() {
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
const uploadResults = await uploadSarif(
const uploadResults = await postProcessAndUploadSarif(
logger,
features,
"always",
checkoutPath,
sarifPath,
category,

View File

@@ -9,7 +9,7 @@ import { getRunnerLogger } from "./logging";
import { createFeatures, setupTests } from "./testing-utils";
import { UploadResult } from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import * as util from "./util";
setupTests(test);
@@ -19,7 +19,27 @@ interface UploadSarifExpectedResult {
expectedFiles?: string[];
}
const uploadSarifMacro = test.macro({
function mockPostProcessSarifFiles() {
const postProcessSarifFiles = sinon.stub(uploadLib, "postProcessSarifFiles");
for (const analysisKind of Object.values(AnalysisKind)) {
const analysisConfig = getAnalysisConfig(analysisKind);
postProcessSarifFiles
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
analysisConfig,
)
.resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" });
}
return postProcessSarifFiles;
}
const postProcessAndUploadSarifMacro = test.macro({
exec: async (
t: ExecutionContext<unknown>,
sarifFiles: string[],
@@ -33,21 +53,16 @@ const uploadSarifMacro = test.macro({
const toFullPath = (filename: string) => path.join(tempDir, filename);
const uploadSpecifiedFiles = sinon.stub(
const postProcessSarifFiles = mockPostProcessSarifFiles();
const uploadPostProcessedFiles = sinon.stub(
uploadLib,
"uploadSpecifiedFiles",
"uploadPostProcessedFiles",
);
for (const analysisKind of Object.values(AnalysisKind)) {
uploadSpecifiedFiles
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
)
const analysisConfig = getAnalysisConfig(analysisKind);
uploadPostProcessedFiles
.withArgs(logger, sinon.match.any, analysisConfig, sinon.match.any)
.resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
}
@@ -56,53 +71,57 @@ const uploadSarifMacro = test.macro({
fs.writeFileSync(sarifFile, "");
}
const actual = await uploadSarif(logger, features, "", testPath);
const actual = await postProcessAndUploadSarif(
logger,
features,
"always",
"",
testPath,
);
for (const analysisKind of Object.values(AnalysisKind)) {
const analysisKindResult = expectedResult[analysisKind];
if (analysisKindResult) {
// We are expecting a result for this analysis kind, check that we have it.
t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult);
// Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths
// Additionally, check that the mocked `postProcessSarifFiles` was called with only the file paths
// that we expected it to be called with.
t.assert(
uploadSpecifiedFiles.calledWith(
postProcessSarifFiles.calledWith(
logger,
features,
sinon.match.any,
analysisKindResult.expectedFiles?.map(toFullPath) ??
fullSarifPaths,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
),
);
} else {
// Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined`
// is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this
// is also returned by our mocked `uploadPostProcessedFiles` when there is no expected result for this
// analysis kind.
t.is(actual[analysisKind], undefined);
// Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind.
// Therefore, we also check that the mocked `uploadPostProcessedFiles` was not called for this analysis kind.
t.assert(
!uploadSpecifiedFiles.calledWith(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
!uploadPostProcessedFiles.calledWith(
logger,
sinon.match.any,
getAnalysisConfig(analysisKind),
sinon.match.any,
),
`uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
`uploadPostProcessedFiles was called for ${analysisKind}, but should not have been.`,
);
}
}
});
},
title: (providedTitle = "") => `uploadSarif - ${providedTitle}`,
title: (providedTitle = "") => `postProcessAndUploadSarif - ${providedTitle}`,
});
test(
"SARIF file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.sarif"],
(tempDir) => path.join(tempDir, "test.sarif"),
{
@@ -117,7 +136,7 @@ test(
test(
"JSON file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.json"],
(tempDir) => path.join(tempDir, "test.json"),
{
@@ -132,7 +151,7 @@ test(
test(
"Code Scanning files",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.json", "test.sarif"],
undefined,
{
@@ -148,7 +167,7 @@ test(
test(
"Code Quality file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.quality.sarif"],
(tempDir) => path.join(tempDir, "test.quality.sarif"),
{
@@ -163,7 +182,7 @@ test(
test(
"Mixed files",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.sarif", "test.quality.sarif"],
undefined,
{
@@ -183,3 +202,65 @@ test(
},
},
);
test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => {
await util.withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const features = createFeatures([]);
const toFullPath = (filename: string) => path.join(tempDir, filename);
const postProcessSarifFiles = mockPostProcessSarifFiles();
const uploadPostProcessedFiles = sinon.stub(
uploadLib,
"uploadPostProcessedFiles",
);
fs.writeFileSync(toFullPath("test.sarif"), "");
fs.writeFileSync(toFullPath("test.quality.sarif"), "");
const actual = await postProcessAndUploadSarif(
logger,
features,
"never",
"",
tempDir,
);
t.truthy(actual);
t.assert(postProcessSarifFiles.calledTwice);
t.assert(uploadPostProcessedFiles.notCalled);
});
});
test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => {
await util.withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const features = createFeatures([]);
const toFullPath = (filename: string) => path.join(tempDir, filename);
const postProcessSarifFiles = mockPostProcessSarifFiles();
fs.writeFileSync(toFullPath("test.sarif"), "");
fs.writeFileSync(toFullPath("test.quality.sarif"), "");
const postProcessedOutPath = path.join(tempDir, "post-processed");
const actual = await postProcessAndUploadSarif(
logger,
features,
"never",
"",
tempDir,
"",
postProcessedOutPath,
);
t.truthy(actual);
t.assert(postProcessSarifFiles.calledTwice);
t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif")));
t.assert(
fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")),
);
});
});

View File

@@ -1,3 +1,4 @@
import { UploadKind } from "./actions-util";
import * as analyses from "./analyses";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
@@ -10,22 +11,26 @@ export type UploadSarifResults = Partial<
>;
/**
* Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
* Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
*
* @param logger The logger to use.
* @param features Information about enabled features.
* @param uploadKind The kind of upload that is requested.
* @param checkoutPath The path where the repository was checked out.
* @param sarifPath The path to the file or directory to upload.
* @param category The analysis category.
* @param postProcessedOutputPath The path to a directory to which the post-processed SARIF files should be written.
*
* @returns A partial mapping from analysis kinds to the upload results.
*/
export async function uploadSarif(
export async function postProcessAndUploadSarif(
logger: Logger,
features: FeatureEnablement,
uploadKind: UploadKind,
checkoutPath: string,
sarifPath: string,
category?: string,
postProcessedOutputPath?: string,
): Promise<UploadSarifResults> {
const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
logger,
@@ -37,14 +42,33 @@ export async function uploadSarif(
sarifGroups,
)) {
const analysisConfig = analyses.getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
const postProcessingResults = await upload_lib.postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifFiles,
category,
analysisConfig,
);
// Write the post-processed SARIF files to disk. This will only write them if needed based on user inputs
// or environment variables.
await upload_lib.writePostProcessedFiles(
logger,
postProcessedOutputPath,
analysisConfig,
postProcessingResults,
);
// Only perform the actual upload of the post-processed files if `uploadKind` is `always`.
if (uploadKind === "always") {
uploadResults[analysisKind] = await upload_lib.uploadPostProcessedFiles(
logger,
checkoutPath,
analysisConfig,
postProcessingResults,
);
}
}
return uploadResults;

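A hypothetical call site for the new entry point: post-process the SARIF files and write them to a directory, but skip the upload by passing an `uploadKind` of "never" (all paths below are illustrative):

const results = await postProcessAndUploadSarif(
logger,
features,
"never", // post-process and write, but do not upload
"/checkout", // checkoutPath
"/results/sarif", // sarifPath
undefined, // category
"/results/post-processed", // postProcessedOutputPath
);

With an `uploadKind` of "always", the same call would additionally upload each post-processed file to its analysis kind's upload target.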
View File

@@ -252,6 +252,35 @@ test("allowed API versions", async (t) => {
);
});
test("getRequiredEnvParam - gets environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "foo";
const result = util.getRequiredEnvParam("SOME_UNIT_TEST_VAR");
t.is(result, "foo");
});
test("getRequiredEnvParam - throws if an environment variable isn't set", (t) => {
t.throws(() => util.getRequiredEnvParam("SOME_UNIT_TEST_VAR"));
});
test("getOptionalEnvVar - gets environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "foo";
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, "foo");
});
test("getOptionalEnvVar - gets undefined for empty environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "";
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, undefined);
});
test("getOptionalEnvVar - doesn't throw for undefined environment variables", (t) => {
t.notThrows(() => {
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, undefined);
});
});
test("doesDirectoryExist", async (t) => {
// Returns false if no file/dir of this name exists
t.false(util.doesDirectoryExist("non-existent-file.txt"));

View File

@@ -1,11 +1,11 @@
import * as fs from "fs";
import * as fsPromises from "fs/promises";
import * as os from "os";
import * as path from "path";
import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import checkDiskSpace from "check-disk-space";
import * as del from "del";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
@@ -673,6 +673,17 @@ export function getRequiredEnvParam(paramName: string): string {
return value;
}
/**
* Get an environment variable, but return `undefined` if it is not set or empty.
*/
export function getOptionalEnvVar(paramName: string): string | undefined {
const value = process.env[paramName];
if (value?.trim().length === 0) {
return undefined;
}
return value;
}
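// For illustration (EXAMPLE_VAR is a stand-in variable name):
// process.env.EXAMPLE_VAR = "foo";
// getOptionalEnvVar("EXAMPLE_VAR"); // => "foo"
// process.env.EXAMPLE_VAR = "";
// getOptionalEnvVar("EXAMPLE_VAR"); // => undefined (empty counts as unset)
// process.env.EXAMPLE_VAR = "   ";
// getOptionalEnvVar("EXAMPLE_VAR"); // => undefined (whitespace-only too)
// delete process.env.EXAMPLE_VAR;
// getOptionalEnvVar("EXAMPLE_VAR"); // => undefined (never throws)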
export class HTTPError extends Error {
public status: number;
@@ -692,8 +703,22 @@ export class ConfigurationError extends Error {
}
}
export function isHTTPError(arg: any): arg is HTTPError {
return arg?.status !== undefined && Number.isInteger(arg.status);
export function asHTTPError(arg: any): HTTPError | undefined {
if (
typeof arg !== "object" ||
arg === null ||
typeof arg.message !== "string"
) {
return undefined;
}
if (Number.isInteger(arg.status)) {
return new HTTPError(arg.message as string, arg.status as number);
}
// See https://github.com/actions/toolkit/blob/acb230b99a46ed33a3f04a758cd68b47b9a82908/packages/tool-cache/src/tool-cache.ts#L19
if (Number.isInteger(arg.httpStatusCode)) {
return new HTTPError(arg.message as string, arg.httpStatusCode as number);
}
return undefined;
}
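// For illustration, asHTTPError normalizes both the octokit-style
// `status` shape and the tool-cache-style `httpStatusCode` shape, and
// rejects values without a string `message`:
// asHTTPError({ message: "Forbidden", status: 403 })?.status; // 403
// asHTTPError({ message: "Not Found", httpStatusCode: 404 })?.status; // 404
// asHTTPError(new Error("no status attached")); // undefined
// asHTTPError("not an error object"); // undefined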
let cachedCodeQlVersion: undefined | VersionInfo = undefined;
@@ -1074,24 +1099,17 @@ export async function checkDiskUsage(
logger: Logger,
): Promise<DiskUsage | undefined> {
try {
// We avoid running the `df` binary under the hood for macOS ARM runners with SIP disabled.
if (
process.platform === "darwin" &&
(process.arch === "arm" || process.arch === "arm64") &&
!(await checkSipEnablement(logger))
) {
return undefined;
}
const diskUsage = await checkDiskSpace(
const diskUsage = await fsPromises.statfs(
getRequiredEnvParam("GITHUB_WORKSPACE"),
);
const mbInBytes = 1024 * 1024;
const gbInBytes = 1024 * 1024 * 1024;
if (diskUsage.free < 2 * gbInBytes) {
const blockSizeInBytes = diskUsage.bsize;
const numBlocksPerMb = (1024 * 1024) / blockSizeInBytes;
const numBlocksPerGb = (1024 * 1024 * 1024) / blockSizeInBytes;
if (diskUsage.bavail < 2 * numBlocksPerGb) {
const message =
"The Actions runner is running low on disk space " +
`(${(diskUsage.free / mbInBytes).toPrecision(4)} MB available).`;
`(${(diskUsage.bavail / numBlocksPerMb).toPrecision(4)} MB available).`;
if (process.env[EnvVar.HAS_WARNED_ABOUT_DISK_SPACE] !== "true") {
logger.warning(message);
} else {
@@ -1100,8 +1118,8 @@ export async function checkDiskUsage(
core.exportVariable(EnvVar.HAS_WARNED_ABOUT_DISK_SPACE, "true");
}
return {
numAvailableBytes: diskUsage.free,
numTotalBytes: diskUsage.size,
numAvailableBytes: diskUsage.bavail * blockSizeInBytes,
numTotalBytes: diskUsage.blocks * blockSizeInBytes,
};
} catch (error) {
logger.warning(

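A standalone sketch of the `statfs`-based computation above, assuming a Node.js runtime recent enough to provide `fsPromises.statfs` (added in Node.js 18.15): `statfs` reports sizes in filesystem blocks, so each figure is multiplied by the block size to obtain bytes.

import * as fsPromises from "fs/promises";

async function freeAndTotalBytes(dir: string) {
const stats = await fsPromises.statfs(dir);
return {
numAvailableBytes: stats.bavail * stats.bsize, // blocks available to unprivileged processes
numTotalBytes: stats.blocks * stats.bsize, // total data blocks on the filesystem
};
}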
View File

@@ -371,7 +371,7 @@ function getInputOrThrow(
input = input.replace(`\${{matrix.${key}}}`, value);
}
}
if (input !== undefined && input.includes("${{")) {
if (input?.includes("${{")) {
throw new Error(
`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`,
);