Mirror of https://github.com/github/codeql-action.git (synced 2025-12-11 18:24:43 +08:00)
Compare commits: 162 commits (cklin/over... to copilot/up...)
Commit list: 162 commit SHAs, from fa79376718 through 11480e326c (the author, date, and message columns of the table are empty in this view).
.github/actions/check-sarif/action.yml (vendored, 2 changes)

@@ -16,5 +16,5 @@ inputs:
       Comma separated list of query ids that should NOT be included in this SARIF file.
 runs:
-  using: node20
+  using: node24
   main: index.js
 
.github/actions/prepare-test/action.yml (vendored, 5 changes)

@@ -2,7 +2,7 @@ name: "Prepare test"
 description: Performs some preparation to run tests
 inputs:
   version:
-    description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'nightly', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
+    description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'toolcache', 'nightly', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
     required: true
   use-all-platform-bundle:
     description: "If true, we output a tools URL with codeql-bundle.tar.gz file rather than platform-specific URL"
@@ -41,6 +41,9 @@ runs:
       elif [[ "$VERSION" == "linked" ]]; then
         echo "tools-url=linked" >> "$GITHUB_OUTPUT"
         exit 0
+      elif [[ "$VERSION" == "toolcache" ]]; then
+        echo "tools-url=toolcache" >> "$GITHUB_OUTPUT"
+        exit 0
       elif [[ "$VERSION" == "default" ]]; then
         echo "tools-url=" >> "$GITHUB_OUTPUT"
         exit 0
.github/dependabot.yml (vendored, 10 changes)

@@ -16,9 +16,12 @@ updates:
       - dependency-name: "eslint-plugin-import"
         versions: [">=2.30.0"]
     groups:
-      npm:
+      npm-minor:
         patterns:
           - "*"
+        update-types:
+          - "minor"
+          - "patch"
   - package-ecosystem: github-actions
     directories:
       - "/.github/workflows"
@@ -28,6 +31,9 @@ updates:
     labels:
       - Rebuild
     groups:
-      actions:
+      actions-minor:
        patterns:
          - "*"
+        update-types:
+          - "minor"
+          - "patch"
.github/pull_request_template.md (vendored, 51 changes)

@@ -1,4 +1,13 @@
-<!-- For GitHub staff: Remember that this is a public repository. -->
+<!--
+For GitHub staff: Remember that this is a public repository. Do not link to internal resources.
+If necessary, link to this PR from an internal issue and include further details there.
+
+Everyone: Include a summary of the context of this change, what it aims to accomplish, and why you
+chose the approach you did if applicable. Indicate any open questions you want to answer
+during the review process and anything you want reviewers to pay particular attention to.
+
+See https://github.com/github/codeql-action/blob/main/CONTRIBUTING.md for additional information.
+-->
 
 ### Risk assessment
 
@@ -7,8 +16,48 @@ For internal use only. Please select the risk level of this change:
 - **Low risk:** Changes are fully under feature flags, or have been fully tested and validated in pre-production environments and are highly observable, or are documentation or test only.
 - **High risk:** Changes are not fully under feature flags, have limited visibility and/or cannot be tested outside of production.
 
+#### Which use cases does this change impact?
+
+<!-- Delete options that don't apply. -->
+
+- **Advanced setup** - Impacts users who have custom workflows.
+- **Default setup** - Impacts users who use default setup.
+- **Code Scanning** - Impacts Code Scanning (i.e. `analysis-kinds: code-scanning`).
+- **Code Quality** - Impacts Code Quality (i.e. `analysis-kinds: code-quality`).
+- **Third-party analyses** - Impacts third-party analyses (i.e. `upload-sarif`).
+- **GHES** - Impacts GitHub Enterprise Server.
+
+#### How did/will you validate this change?
+
+<!-- Delete options that don't apply. Be explicit about test coverage. -->
+
+- **Test repository** - This change will be tested on a test repository before merging.
+- **Unit tests** - I am depending on existing unit test coverage (i.e. tests in `.test.ts` files).
+- **End-to-end tests** - I am depending on PR checks (i.e. tests in `pr-checks`).
+- **New / updated tests** - I have added or updated tests (summarize below).
+- **Other** - Please provide details.
+- **None** - I am not validating these changes (provide justification below).
+
+#### If something goes wrong after this change is released, what are the mitigation and rollback strategies?
+
+<!-- Delete strategies that don't apply. -->
+
+- **Feature flags** - All new or changed code paths can be fully disabled with corresponding feature flags.
+- **Rollback** - Change can only be disabled by rolling back the release or releasing a new version with a fix.
+- **Other** - Please provide details.
+
+#### How will you know if something goes wrong after this change is released?
+
+<!-- Delete options that don't apply. -->
+
+- **Telemetry** - I rely on existing telemetry or have made changes to the telemetry.
+- **Dashboards** - I will watch relevant dashboards for issues after the release. Consider whether this requires this change to be released at a particular time rather than as part of a regular release.
+- **Alerts** - New or existing monitors will trip if something goes wrong with this change.
+- **Other** - Please provide details.
+
 ### Merge / deployment checklist
 
 - Confirm this change is backwards compatible with existing workflows.
 - Confirm that tests have been added/updated or are not needed.
 - Consider adding a [changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) entry for this change.
 - Confirm the [readme](https://github.com/github/codeql-action/blob/main/README.md) and docs have been updated if necessary.
.github/update-release-branch.py (vendored, 8 changes)

@@ -371,10 +371,10 @@ def main():
     # releases.
     run_git('revert', vOlder_update_commits[0], '--no-edit')
 
-    # Also revert the "Update checked-in dependencies" commit created by Actions.
-    update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
-    print(f'  Reverting {update_dependencies_commit}')
-    run_git('revert', update_dependencies_commit, '--no-edit')
+    # Also revert the "Rebuild" commit created by Actions.
+    rebuild_commit = run_git('log', '--grep', '^Rebuild$', '--format=%H').split()[0]
+    print(f'  Reverting {rebuild_commit}')
+    run_git('revert', rebuild_commit, '--no-edit')
 
   else:
     print('  Nothing to revert.')
Changes to a generated PR-check workflow (file name not shown in this view):

@@ -3,7 +3,7 @@
 # pr-checks/sync.sh
 # to regenerate this file.
 
-name: 'PR Check - Upload-sarif: code quality endpoint'
+name: 'PR Check - Bundle: From toolcache'
 env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   GO111MODULE: auto
@@ -21,19 +21,9 @@ on:
   schedule:
     - cron: '0 5 * * *'
   workflow_dispatch:
-    inputs:
-      go-version:
-        type: string
-        description: The version of Go to install
-        required: false
-        default: '>=1.21.0'
+    inputs: {}
   workflow_call:
-    inputs:
-      go-version:
-        type: string
-        description: The version of Go to install
-        required: false
-        default: '>=1.21.0'
+    inputs: {}
 defaults:
   run:
     shell: bash
@@ -41,14 +31,14 @@ concurrency:
   cancel-in-progress: ${{ github.event_name == 'pull_request' }}
   group: ${{ github.workflow }}-${{ github.ref }}
 jobs:
-  upload-quality-sarif:
+  bundle-from-toolcache:
     strategy:
       fail-fast: false
       matrix:
         include:
           - os: ubuntu-latest
-            version: default
-    name: 'Upload-sarif: code quality endpoint'
+            version: toolcache
+    name: 'Bundle: From toolcache'
     if: github.triggering_actor != 'dependabot[bot]'
     permissions:
       contents: read
@@ -65,31 +55,32 @@ jobs:
         version: ${{ matrix.version }}
         use-all-platform-bundle: 'false'
         setup-kotlin: 'true'
-    - name: Install Go
-      uses: actions/setup-go@v6
+    - name: Install @actions/tool-cache
+      run: npm install @actions/tool-cache
+    - name: Check toolcache contains CodeQL
+      continue-on-error: true
+      uses: actions/github-script@v8
       with:
-        go-version: ${{ inputs.go-version || '>=1.21.0' }}
-        cache: false
-    - uses: ./../action/init
+        script: |
+          const toolcache = require('@actions/tool-cache');
+          const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
+          if (allCodeqlVersions.length === 0) {
+            throw new Error(`CodeQL could not be found in the toolcache`);
+          }
+    - id: init
+      uses: ./../action/init
       with:
-        languages: javascript
         tools: ${{ steps.prepare-test.outputs.tools-url }}
-        analysis-kinds: code-quality
+        languages: csharp,java,javascript,python
-    - name: Build code
-      run: ./build.sh
-    # Generate some SARIF we can upload with the upload-sarif step
-    - uses: ./../action/analyze
+    - name: Check CodeQL is installed within the toolcache
+      uses: actions/github-script@v8
       with:
-        ref: refs/heads/main
-        sha: 5e235361806c361d4d3f8859e3c897658025a9a2
-        upload: never
-    - uses: ./../action/upload-sarif
-      id: upload-sarif
-      with:
-        ref: refs/heads/main
-        sha: 5e235361806c361d4d3f8859e3c897658025a9a2
-    - name: Check output from `upload-sarif` step
-      if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
-      run: exit 1
+        script: |
+          const toolcache = require('@actions/tool-cache');
+          const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
+          console.log(`Found CodeQL versions: ${allCodeqlVersions}`);
+          if (allCodeqlVersions.length === 0) {
+            throw new Error('CodeQL not found in toolcache');
+          }
     env:
       CODEQL_ACTION_TEST_MODE: true
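For reference, the toolcache check that the new workflow embeds via `actions/github-script` can also be run as a plain Node script. A minimal sketch, assuming it runs on a GitHub Actions runner (so `RUNNER_TOOL_CACHE` is set) and that `@actions/tool-cache` has been installed with npm, as the "Install @actions/tool-cache" step does:

```js
// Minimal sketch of the workflow's toolcache check as a standalone Node script.
// Assumes a GitHub Actions runner environment and a prior `npm install @actions/tool-cache`.
const toolcache = require('@actions/tool-cache');

// findAllVersions returns the versions of a tool currently present in the runner toolcache.
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
console.log(`Found CodeQL versions: ${allCodeqlVersions}`);
if (allCodeqlVersions.length === 0) {
  throw new Error('CodeQL could not be found in the toolcache');
}
```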
.github/workflows/__rubocop-multi-language.yml (generated, vendored, 2 changes)

@@ -56,7 +56,7 @@ jobs:
         use-all-platform-bundle: 'false'
         setup-kotlin: 'true'
     - name: Set up Ruby
-      uses: ruby/setup-ruby@0481980f17b760ef6bca5e8c55809102a0af1e5a # v1.263.0
+      uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
       with:
         ruby-version: 2.6
     - name: Install Code Scanning integration
.github/workflows/__upload-sarif.yml (generated, vendored, new file, 158 lines added)

@@ -0,0 +1,158 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
#    pr-checks/sync.sh
# to regenerate this file.

name: PR Check - Test different uses of `upload-sarif`
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto
on:
  push:
    branches:
      - main
      - releases/v*
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
  schedule:
    - cron: '0 5 * * *'
  workflow_dispatch:
    inputs:
      go-version:
        type: string
        description: The version of Go to install
        required: false
        default: '>=1.21.0'
  workflow_call:
    inputs:
      go-version:
        type: string
        description: The version of Go to install
        required: false
        default: '>=1.21.0'
defaults:
  run:
    shell: bash
concurrency:
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}
  group: ${{ github.workflow }}-${{ github.ref }}
jobs:
  upload-sarif:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-latest
            version: default
            analysis-kinds: code-scanning
          - os: ubuntu-latest
            version: default
            analysis-kinds: code-quality
          - os: ubuntu-latest
            version: default
            analysis-kinds: code-scanning,code-quality
    name: Test different uses of `upload-sarif`
    if: github.triggering_actor != 'dependabot[bot]'
    permissions:
      contents: read
      security-events: read
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Prepare test
        id: prepare-test
        uses: ./.github/actions/prepare-test
        with:
          version: ${{ matrix.version }}
          use-all-platform-bundle: 'false'
          setup-kotlin: 'true'
      - name: Install Go
        uses: actions/setup-go@v6
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          languages: csharp,java,javascript,python
          analysis-kinds: ${{ matrix.analysis-kinds }}
      - name: Build code
        run: ./build.sh
      # Generate some SARIF we can upload with the upload-sarif step
      - uses: ./../action/analyze
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          upload: never
          output: ${{ runner.temp }}/results

      - name: |
          Upload all SARIF files for `analysis-kinds: ${{ matrix.analysis-kinds }}`
        uses: ./../action/upload-sarif
        id: upload-sarif
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          sarif_file: ${{ runner.temp }}/results
          category: |
            ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
      - name: Fail for missing output from `upload-sarif` step for `code-scanning`
        if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
        run: exit 1
      - name: Fail for missing output from `upload-sarif` step for `code-quality`
        if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
        run: exit 1

      - name: Upload single SARIF file for Code Scanning
        uses: ./../action/upload-sarif
        id: upload-single-sarif-code-scanning
        if: contains(matrix.analysis-kinds, 'code-scanning')
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          sarif_file: ${{ runner.temp }}/results/javascript.sarif
          category: |
            ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
      - name: Fail for missing output from `upload-single-sarif-code-scanning` step
        if: contains(matrix.analysis-kinds, 'code-scanning') &&
          !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
        run: exit 1
      - name: Upload single SARIF file for Code Quality
        uses: ./../action/upload-sarif
        id: upload-single-sarif-code-quality
        if: contains(matrix.analysis-kinds, 'code-quality')
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
          category: |
            ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
      - name: Fail for missing output from `upload-single-sarif-code-quality` step
        if: contains(matrix.analysis-kinds, 'code-quality') &&
          !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
        run: exit 1

      - name: Change SARIF file extension
        if: contains(matrix.analysis-kinds, 'code-scanning')
        run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
      - name: Upload single non-`.sarif` file
        uses: ./../action/upload-sarif
        id: upload-single-non-sarif
        if: contains(matrix.analysis-kinds, 'code-scanning')
        with:
          ref: refs/heads/main
          sha: 5e235361806c361d4d3f8859e3c897658025a9a2
          sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
          category: |
            ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
      - name: Fail for missing output from `upload-single-non-sarif` step
        if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
        run: exit 1
    env:
      CODEQL_ACTION_TEST_MODE: true
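The `Fail for missing output` steps above read the `sarif-ids` output of `upload-sarif` through `fromJSON` and index it by analysis kind. A minimal sketch of the same check in plain JavaScript; the payload shape and the ID values are assumptions inferred from the workflow's `fromJSON(...).code-scanning` and `.code-quality` expressions:

```js
// Hypothetical `sarif-ids` output, keyed by analysis kind as the workflow's
// fromJSON(...) checks assume; the ID values are made up for illustration.
const sarifIdsOutput = JSON.stringify({
  "code-scanning": "11111111-2222-3333-4444-555555555555",
  "code-quality": "66666666-7777-8888-9999-000000000000",
});

// Mirrors the per-kind "Fail for missing output" steps (`run: exit 1`).
const sarifIds = JSON.parse(sarifIdsOutput);
for (const kind of ["code-scanning", "code-quality"]) {
  if (!sarifIds[kind]) {
    console.error(`Missing SARIF id for ${kind}`);
    process.exitCode = 1;
  }
}
```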
.github/workflows/__with-checkout-path.yml (generated, vendored, 13 changes)

@@ -103,29 +103,30 @@ jobs:
 
     - name: Verify SARIF after upload
       run: |
+        PAYLOAD_FILE="$RUNNER_TEMP/payload-code-scanning.json"
         EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
         EXPECTED_REF="v1.1.0"
         EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
 
-        ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
-        ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
-        ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
+        ACTUAL_COMMIT_OID="$(cat "$PAYLOAD_FILE" | jq -r .commit_oid)"
+        ACTUAL_REF="$(cat "$PAYLOAD_FILE" | jq -r .ref)"
+        ACTUAL_CHECKOUT_URI="$(cat "$PAYLOAD_FILE" | jq -r .checkout_uri)"
 
         if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
           echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
-          echo "$RUNNER_TEMP/payload.json"
+          echo "$PAYLOAD_FILE"
           exit 1
         fi
 
         if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
           echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
-          echo "$RUNNER_TEMP/payload.json"
+          echo "$PAYLOAD_FILE"
           exit 1
         fi
 
         if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
           echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
-          echo "$RUNNER_TEMP/payload.json"
+          echo "$PAYLOAD_FILE"
           exit 1
         fi
       env:
Changes to another workflow (file name not shown in this view):

@@ -58,7 +58,7 @@ jobs:
     - name: Set up Node.js
       uses: actions/setup-node@v5
       with:
-        node-version: '20'
+        node-version: 24
         cache: 'npm'
 
     - name: Install dependencies
.github/workflows/post-release-mergeback.yml (vendored, 1 change)

@@ -146,6 +146,7 @@ jobs:
           private-key: ${{ secrets.AUTOMATION_PRIVATE_KEY }}
 
     - name: Create the GitHub release
+      if: steps.check.outputs.exists != 'true'
       env:
         PARTIAL_CHANGELOG: "${{ runner.temp }}/partial_changelog.md"
         VERSION: "${{ steps.getVersion.outputs.version }}"
.github/workflows/pr-checks.yml (vendored, 7 changes)

@@ -20,6 +20,7 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, macos-latest, windows-latest]
+        node-version: [20, 24]
     permissions:
       contents: read
       security-events: write # needed to upload ESLint results
@@ -36,7 +37,7 @@ jobs:
     - name: Set up Node.js
       uses: actions/setup-node@v5
       with:
-        node-version: '20.x'
+        node-version: ${{ matrix.node-version }}
         cache: 'npm'
 
     - name: Set up Python
@@ -72,8 +73,8 @@ jobs:
       run: npm run lint-ci
 
     - name: Upload sarif
-      uses: github/codeql-action/upload-sarif@v3
-      if: matrix.os == 'ubuntu-latest'
+      uses: github/codeql-action/upload-sarif@v4
+      if: matrix.os == 'ubuntu-latest' && matrix.node-version == 24
       with:
         sarif_file: eslint.sarif
         category: eslint
.github/workflows/query-filters.yml (vendored, 2 changes)

@@ -34,7 +34,7 @@ jobs:
     - name: Install Node.js
       uses: actions/setup-node@v5
       with:
-        node-version: 20.x
+        node-version: 24
         cache: npm
 
     - name: Install dependencies
.github/workflows/update-bundle.yml (vendored, 2 changes)

@@ -43,7 +43,7 @@ jobs:
     - name: Set up Node.js
       uses: actions/setup-node@v5
       with:
-        node-version: '20.x'
+        node-version: 24
         cache: 'npm'
 
     - name: Install dependencies
.github/workflows/update-proxy-release.yml (vendored, deleted, 99 lines removed)

@@ -1,99 +0,0 @@
name: Update dependency proxy release assets
on:
  workflow_dispatch:
    inputs:
      tag:
        description: "The tag of CodeQL Bundle release that contains the proxy binaries as release assets"
        type: string
        required: true

defaults:
  run:
    shell: bash

jobs:
  update:
    name: Update code and create PR
    timeout-minutes: 15
    runs-on: ubuntu-latest
    permissions:
      contents: write # needed to push the updated files
      pull-requests: write # needed to create the PR
    env:
      RELEASE_TAG: ${{ inputs.tag }}
    steps:
      - name: Check release tag format
        id: checks
        run: |
          if ! [[ $RELEASE_TAG =~ ^codeql-bundle-v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "Invalid release tag: expected a CodeQL bundle tag in the 'codeql-bundle-vM.N.P' format."
            exit 1
          fi

          echo "target_branch=dependency-proxy/$RELEASE_TAG" >> $GITHUB_OUTPUT

      - name: Check that the release exists
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
        run: |
          (gh release view --repo "$GITHUB_REPOSITORY" --json "assets" "$RELEASE_TAG" && echo "Release found.") || exit 1

      - name: Install Node
        uses: actions/setup-node@v5

      - name: Checkout repository
        uses: actions/checkout@v5
        with:
          fetch-depth: 0 # ensure we have all tags and can push commits
          ref: main

      - name: Update git config
        run: |
          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git config --global user.name "github-actions[bot]"

      - name: Update release tag and version
        run: |
          NOW=$(date +"%Y%m%d%H%M%S") # only used to make sure we don't fetch stale binaries from the toolcache
          sed -i "s|https://github.com/github/codeql-action/releases/download/codeql-bundle-v[0-9.]\+/|https://github.com/github/codeql-action/releases/download/$RELEASE_TAG/|g" ./src/start-proxy-action.ts
          sed -i "s/\"v2.0.[0-9]\+\"/\"v2.0.$NOW\"/g" ./src/start-proxy-action.ts

      - name: Compile TypeScript and commit changes
        env:
          TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
        run: |
          set -exu
          git checkout -b "$TARGET_BRANCH"

          npm run build
          git add ./src/start-proxy-action.ts
          git add ./lib
          git commit -m "Update release used by \`start-proxy\` action"

      - name: Push changes and open PR
        env:
          GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
          TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
          PR_FLAG: ${{ (github.event_name == 'workflow_dispatch' && '--draft') || '--dry-run' }}
        run: |
          set -exu
          pr_title="Update release used by \`start-proxy\` to \`$RELEASE_TAG\`"
          pr_body=$(cat << EOF
          This PR updates the \`start-proxy\` action to use the private registry proxy binaries that
          are attached as release assets to the \`$RELEASE_TAG\` release.


          Please do the following before merging:

          - [ ] Verify that the changes to the code are correct.
          - [ ] Mark the PR as ready for review to trigger the CI.
          EOF
          )

          git push origin "$TARGET_BRANCH"
          gh pr create \
            --head "$TARGET_BRANCH" \
            --base "main" \
            --title "${pr_title}" \
            --body "${pr_body}" \
            $PR_FLAG
CHANGELOG.md (12 changes)

@@ -6,6 +6,18 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
 
 No user facing changes.
 
+## 4.30.8 - 10 Oct 2025
+
+No user facing changes.
+
+## 4.30.7 - 06 Oct 2025
+
+- [v4+ only] The CodeQL Action now runs on Node.js v24. [#3169](https://github.com/github/codeql-action/pull/3169)
+
+## 3.30.6 - 02 Oct 2025
+
+- Update default CodeQL bundle version to 2.23.2. [#3168](https://github.com/github/codeql-action/pull/3168)
+
 ## 3.30.5 - 26 Sep 2025
 
 - We fixed a bug that was introduced in `3.30.4` with `upload-sarif` which resulted in files without a `.sarif` extension not getting uploaded. [#3160](https://github.com/github/codeql-action/pull/3160)
Changes to another file (name not shown in this view):

@@ -13,7 +13,7 @@ Please note that this project is released with a [Contributor Code of Conduct][c
 
 ## Development and Testing
 
-Before you start, ensure that you have a recent version of node (16 or higher) installed, along with a recent version of npm (9.2 or higher). You can see which version of node is used by the action in `init/action.yml`.
+Before you start, ensure that you have a recent version of node (24 or higher) installed, along with a recent version of npm (9.2 or higher). You can see which version of node is used by the action in `init/action.yml`.
 
 ### Common tasks
 
Changes to another file (name not shown in this view):

@@ -62,7 +62,8 @@ For compiled languages:
 
 The following versions of the CodeQL Action are currently supported:
 
-- v3 (latest)
+- v4 (latest)
+- v3
 
 ## Supported versions of the CodeQL Bundle on GitHub Enterprise Server
 
Changes to an action manifest (file name not shown in this view):

@@ -92,6 +92,6 @@ outputs:
   sarif-id:
     description: The ID of the uploaded SARIF file.
 runs:
-  using: node20
+  using: node24
   main: "../lib/analyze-action.js"
   post: "../lib/analyze-action-post.js"
Changes to an action manifest (file name not shown in this view):

@@ -15,5 +15,5 @@ inputs:
     $GITHUB_WORKSPACE as its working directory.
     required: false
 runs:
-  using: node20
+  using: node24
   main: '../lib/autobuild-action.js'
Changes to the ESLint flat configuration (file name not shown in this view):

@@ -146,6 +146,12 @@ export default [
       "@typescript-eslint/prefer-regexp-exec": "off",
       "@typescript-eslint/require-await": "off",
       "@typescript-eslint/restrict-template-expressions": "off",
+      "@typescript-eslint/no-unused-vars": [
+        "error",
+        {
+          "argsIgnorePattern": "^_",
+        }
+      ],
       "func-style": "off",
     },
   },
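To illustrate what the new `@typescript-eslint/no-unused-vars` entry allows: unused function parameters are still reported as errors unless their names start with an underscore, which the `argsIgnorePattern: "^_"` option exempts. A small hypothetical example (the function and its caller are not from the repository):

```js
// With the rule above, `_response` may stay unused because it matches
// argsIgnorePattern "^_", while an unused parameter named `response` would be flagged.
function logRequestFailure(_response, message) {
  console.error(`Request failed: ${message}`);
}

logRequestFailure(undefined, "rate limit exceeded");
```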
Changes to an action manifest (file name not shown in this view):

@@ -165,6 +165,6 @@ outputs:
   codeql-version:
     description: The version of the CodeQL binary used for analysis
 runs:
-  using: node20
+  using: node24
   main: '../lib/init-action.js'
   post: '../lib/init-action-post.js'
lib/analyze-action-post.js (generated, 128 changes)

Most of this diff is mechanical renaming introduced by re-bundling: the local `compare` bindings in the vendored semver helpers (compare, rcompare, compare-loose, gt, lt, eq, neq, gte, lte, simplify, subset, and the semver entry points) become `compare2`, and the same rename is applied to `PathWin32.sameRoot`'s default parameter and to the `compare` helper in the bundled `b4a` module. In the embedded package.json, the version moves from "3.30.6" to "4.30.9", "@octokit/request-error": "^7.0.1" is added, and semver (^7.7.2 to ^7.7.3), @eslint/js (^9.36.0 to ^9.37.0), @typescript-eslint/eslint-plugin (^8.44.1 to ^8.46.0), and typescript (^5.9.2 to ^5.9.3) are bumped. The remaining substantive hunks are:

@@ -24680,6 +24680,9 @@ var require_identifiers = __commonJS({
     "use strict";
     var numeric = /^[0-9]+$/;
     var compareIdentifiers = (a, b) => {
+      if (typeof a === "number" && typeof b === "number") {
+        return a === b ? 0 : a < b ? -1 : 1;
+      }
       const anum = numeric.test(a);
       const bnum = numeric.test(b);
       if (anum && bnum) {
@@ -24786,7 +24789,25 @@ var require_semver = __commonJS({
       if (!(other instanceof _SemVer)) {
         other = new _SemVer(other, this.options);
       }
-      return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
+      if (this.major < other.major) {
+        return -1;
+      }
+      if (this.major > other.major) {
+        return 1;
+      }
+      if (this.minor < other.minor) {
+        return -1;
+      }
+      if (this.minor > other.minor) {
+        return 1;
+      }
+      if (this.patch < other.patch) {
+        return -1;
+      }
+      if (this.patch > other.patch) {
+        return 1;
+      }
+      return 0;
     }
     comparePre(other) {
@@ -25547,6 +25568,7 @@ var require_range = __commonJS({
     var parseComparator = (comp, options) => {
+      comp = comp.replace(re[t.BUILD], "");
       debug2("comp", comp, options);
       comp = replaceCarets(comp, options);
       debug2("caret", comp);
@@ -28542,7 +28565,7 @@ var require_brace_expansion = __commonJS({
       var isSequence = isNumericSequence || isAlphaSequence;
       var isOptions = m.body.indexOf(",") >= 0;
       if (!isSequence && !isOptions) {
-        if (m.post.match(/,.*\}/)) {
+        if (m.post.match(/,(?!,).*\}/)) {
           str2 = m.pre + "{" + m.body + escClose + m.post;
           return expand(str2);
         }
@@ -117805,6 +117828,11 @@ function isSafeArtifactUpload(codeQlVersion) {
 
 // src/feature-flags.ts
 var featureConfig = {
+  ["allow_toolcache_input" /* AllowToolcacheInput */]: {
+    defaultValue: false,
+    envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
+    minimumVersion: void 0
+  },
   ["cleanup_trap_caches" /* CleanupTrapCaches */]: {
     defaultValue: false,
     envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -118671,7 +118699,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
   if (fs5.existsSync(baseTempDir)) {
     const outputDirs = fs5.readdirSync(baseTempDir);
     for (const outputDir of outputDirs) {
-      const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
+      const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => path5.extname(f) === ".sarif");
       for (const sarifFile of sarifFiles) {
        toUpload.push(path5.resolve(baseTempDir, outputDir, sarifFile));
       }
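The rewritten `compare()` in the bundled semver walks major, minor, and patch numerically instead of delegating to `compareIdentifiers`. A small sketch of the observable ordering, using the published `semver` package rather than the bundled copy; the version strings are taken from elsewhere in this diff:

```js
// Ordering behaviour of semver's compare(): returns -1, 0, or 1, decided by the
// first of major, minor, patch that differs. Requires `npm install semver`.
const semver = require('semver');

console.log(semver.compare('4.30.9', '3.30.6')); //  1  (major differs)
console.log(semver.compare('2.23.2', '2.23.2')); //  0  (equal)
console.log(semver.compare('7.7.2', '7.7.3'));   // -1  (patch differs)
```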
lib/analyze-action.js (generated, 346 changes)

This bundle picks up the same re-bundling renames and semver changes. The vendored request-error module's `RequestError` class and its `import_request_error` binding become `RequestError2` and `import_request_error2` in both bundled copies (require_dist_node4/require_dist_node5 and require_dist_node7/require_dist_node8) and in every `throw new ...RequestError(...)` site; `compare2` in to-regex-range becomes `compare3`; the `statSync3` helper in a vendored fs wrapper becomes `statSync4`; and the local `compare2` bindings in the vendored semver helpers become `compare3`. It also contains the same substantive semver changes shown above for lib/analyze-action-post.js: the numeric fast path added to `compareIdentifiers`, the rewritten `SemVer.compare()` that compares major, minor, and patch numerically, and the `comp = comp.replace(re[t.BUILD], "")` line added to `parseComparator`. The capture ends partway through the require_subset hunk (@@ -32033,7 +32055,7 @@).
|
||||
var compare3 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -32093,7 +32115,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
gtltComp = compare3(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -32173,14 +32195,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
const comp = compare3(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
const comp = compare3(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -32204,7 +32226,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare2 = require_compare();
|
||||
var compare3 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -32242,7 +32264,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare: compare2,
|
||||
compare: compare3,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -32287,7 +32309,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "3.30.6",
version: "4.30.9",
private: true,
description: "CodeQL action",
scripts: {
@@ -32322,6 +32344,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -32335,14 +32358,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -32353,7 +32376,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -32366,7 +32389,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -33745,14 +33768,14 @@ var require_dist_node14 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -33854,7 +33877,7 @@ var require_dist_node15 = __commonJS({
|
||||
throw error2;
|
||||
}
|
||||
var import_light = __toESM2(require_light());
|
||||
var import_request_error = require_dist_node14();
|
||||
var import_request_error2 = require_dist_node14();
|
||||
async function wrapRequest(state, octokit, request, options) {
|
||||
const limiter = new import_light.default();
|
||||
limiter.on("failed", function(error2, info4) {
|
||||
@@ -33875,7 +33898,7 @@ var require_dist_node15 = __commonJS({
|
||||
if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
|
||||
response.data.errors[0].message
|
||||
)) {
|
||||
const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
|
||||
const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
|
||||
request: options,
|
||||
response
|
||||
});
|
||||
@@ -34391,7 +34414,7 @@ var require_brace_expansion = __commonJS({
var isSequence = isNumericSequence || isAlphaSequence;
var isOptions = m.body.indexOf(",") >= 0;
if (!isSequence && !isOptions) {
if (m.post.match(/,.*\}/)) {
if (m.post.match(/,(?!,).*\}/)) {
str2 = m.pre + "{" + m.body + escClose + m.post;
return expand(str2);
}
@@ -36110,13 +36133,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
exports2.compare = compare3;
|
||||
function compare3(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare2(a, b, true);
|
||||
return compare3(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -36126,7 +36149,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare2(b, a, loose);
|
||||
return compare3(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -36142,27 +36165,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare2(a, b, loose) > 0;
|
||||
return compare3(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare2(a, b, loose) < 0;
|
||||
return compare3(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare2(a, b, loose) === 0;
|
||||
return compare3(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare2(a, b, loose) !== 0;
|
||||
return compare3(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare2(a, b, loose) >= 0;
|
||||
return compare3(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare2(a, b, loose) <= 0;
|
||||
return compare3(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -89773,6 +89796,9 @@ function isGoodVersion(versionSpec) {
function isInTestMode() {
return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
}
function shouldSkipSarifUpload() {
return isInTestMode() || process.env["CODEQL_ACTION_SKIP_SARIF_UPLOAD" /* SKIP_SARIF_UPLOAD */] === "true";
}
function getTestingEnvironment() {
const testingEnvironment = process.env["CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */] || "";
if (testingEnvironment === "") {
@@ -90039,9 +90065,12 @@ function getWorkflowRunAttempt() {
function isSelfHostedRunner() {
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
}
function isDefaultSetup() {
function isDynamicWorkflow() {
return getWorkflowEventName() === "dynamic";
}
function isDefaultSetup() {
return isDynamicWorkflow();
}
function prettyPrintInvocation(cmd, args) {
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
}
@@ -90167,6 +90196,7 @@ var CodeScanning = {
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
sarifExtension: ".sarif",
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
};
var CodeQuality = {
@@ -90175,6 +90205,7 @@ var CodeQuality = {
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
sarifExtension: ".quality.sarif",
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};

@@ -90362,6 +90393,45 @@ var path14 = __toESM(require("path"));
|
||||
var core10 = __toESM(require_core());
|
||||
var toolrunner3 = __toESM(require_toolrunner());
|
||||
|
||||
// node_modules/@octokit/request-error/dist-src/index.js
|
||||
var RequestError = class extends Error {
|
||||
name;
|
||||
/**
|
||||
* http status code
|
||||
*/
|
||||
status;
|
||||
/**
|
||||
* Request options that lead to the error.
|
||||
*/
|
||||
request;
|
||||
/**
|
||||
* Response object if a response was received
|
||||
*/
|
||||
response;
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
this.name = "HttpError";
|
||||
this.status = Number.parseInt(statusCode);
|
||||
if (Number.isNaN(this.status)) {
|
||||
this.status = 0;
|
||||
}
|
||||
if ("response" in options) {
|
||||
this.response = options.response;
|
||||
}
|
||||
const requestCopy = Object.assign({}, options.request);
|
||||
if (options.request.headers.authorization) {
|
||||
requestCopy.headers = Object.assign({}, options.request.headers, {
|
||||
authorization: options.request.headers.authorization.replace(
|
||||
/(?<! ) .*$/,
|
||||
" [REDACTED]"
|
||||
)
|
||||
});
|
||||
}
|
||||
requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
|
||||
this.request = requestCopy;
|
||||
}
|
||||
};
|
||||
|
||||
// src/cli-errors.ts
|
||||
var SUPPORTED_PLATFORMS = [
|
||||
["linux", "x64"],
|
||||
@@ -90634,8 +90704,8 @@ var path8 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());

// src/defaults.json
var bundleVersion = "codeql-bundle-v2.23.1";
var cliVersion = "2.23.1";
var bundleVersion = "codeql-bundle-v2.23.2";
var cliVersion = "2.23.2";

// src/overlay-database-utils.ts
var crypto = __toESM(require("crypto"));
@@ -91047,6 +91117,11 @@ var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0";
var featureConfig = {
["allow_toolcache_input" /* AllowToolcacheInput */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: void 0
},
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -92109,6 +92184,7 @@ var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
var CODEQL_TOOLCACHE_INPUT = "toolcache";
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -92250,7 +92326,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -92287,6 +92363,40 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
"`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required."
);
}
} else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) {
let latestToolcacheVersion;
const allowToolcacheValueFF = await features.getValue(
"allow_toolcache_input" /* AllowToolcacheInput */
);
const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode());
if (allowToolcacheValue) {
logger.info(
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.`
);
latestToolcacheVersion = getLatestToolcacheVersion(logger);
if (latestToolcacheVersion) {
cliVersion2 = latestToolcacheVersion;
}
}
if (latestToolcacheVersion === void 0) {
if (allowToolcacheValue) {
logger.info(
`Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...`
);
} else {
if (allowToolcacheValueFF) {
logger.warning(
`Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.`
);
} else {
logger.info(
`Ignoring 'tools: ${toolsInput}' because the feature is not enabled.`
);
}
}
cliVersion2 = defaultCliVersion.cliVersion;
tagName = defaultCliVersion.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url2 = toolsInput;
@@ -92495,7 +92605,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
|
||||
}
|
||||
return cliVersion2;
|
||||
}
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
if (!await isBinaryAccessible("tar", logger)) {
|
||||
throw new ConfigurationError(
|
||||
"Could not find tar in PATH, so unable to extract CodeQL bundle."
|
||||
@@ -92508,6 +92618,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
|
||||
apiDetails,
|
||||
variant,
|
||||
zstdAvailability.available,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
let codeqlFolder;
|
||||
@@ -92593,8 +92704,24 @@ async function getNightlyToolsUrl(logger) {
);
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver7.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
)}.`
);
if (allVersions.length > 0) {
const latestToolcacheVersion = allVersions[0];
logger.info(
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`
);
return latestToolcacheVersion;
}
return void 0;
}
function isReservedToolsValue(tools) {
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT;
}
|
||||
// src/tracer-config.ts
|
||||
@@ -92649,7 +92776,7 @@ var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
|
||||
var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
|
||||
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
|
||||
var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
|
||||
try {
|
||||
const {
|
||||
codeqlFolder,
|
||||
@@ -92663,6 +92790,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
logger.debug(
|
||||
@@ -92687,7 +92815,8 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
zstdAvailability
|
||||
};
|
||||
} catch (e) {
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
|
||||
e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
|
||||
throw new ErrorClass(
|
||||
`Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
|
||||
|
||||
@@ -93338,6 +93467,7 @@ async function makeGlobber(patterns) {
|
||||
return glob.create(patterns.join("\n"));
|
||||
}
|
||||
async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
const status = [];
|
||||
for (const language of config.languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
if (cacheConfig === void 0) {
|
||||
@@ -93348,6 +93478,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
}
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, result: "no-hash" /* NoHash */ });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
|
||||
);
|
||||
@@ -93355,6 +93486,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
}
|
||||
const size = await getTotalCacheSize(cacheConfig.paths, logger, true);
|
||||
if (size === 0) {
|
||||
status.push({ language, result: "empty" /* Empty */ });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} since it is empty.`
|
||||
);
|
||||
@@ -93365,18 +93497,28 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
`Uploading cache of size ${size} for ${language} with key ${key}...`
|
||||
);
|
||||
try {
|
||||
const start = performance.now();
|
||||
await actionsCache3.saveCache(cacheConfig.paths, key);
|
||||
const upload_duration_ms = Math.round(performance.now() - start);
|
||||
status.push({
|
||||
language,
|
||||
result: "stored" /* Stored */,
|
||||
upload_size_bytes: Math.round(size),
|
||||
upload_duration_ms
|
||||
});
|
||||
} catch (error2) {
|
||||
if (error2 instanceof actionsCache3.ReserveCacheError) {
|
||||
logger.info(
|
||||
`Not uploading cache for ${language}, because ${key} is already in use.`
|
||||
);
|
||||
logger.debug(error2.message);
|
||||
status.push({ language, result: "duplicate" /* Duplicate */ });
|
||||
} else {
|
||||
throw error2;
|
||||
}
|
||||
}
|
||||
}
|
||||
return status;
|
||||
}
|
||||
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
|
||||
const hash2 = await glob.hashFiles(cacheConfig.hash.join("\n"));
|
||||
@@ -93826,7 +93968,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
logger.info(`Interpreting ${analysis.name} results for ${language}`);
|
||||
let category = automationDetailsId;
|
||||
if (analysis.kind === "code-quality" /* CodeQuality */) {
|
||||
category = fixCodeQualityCategory(logger, automationDetailsId);
|
||||
category = analysis.fixCategory(logger, automationDetailsId);
|
||||
}
|
||||
const sarifFile = path16.join(
|
||||
sarifFolder,
|
||||
@@ -94048,7 +94190,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -94071,7 +94213,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(`Could not determine the workflow event name: ${e}.`);
|
||||
}
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
if (diskInfo) {
|
||||
statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes;
|
||||
@@ -94108,6 +94250,9 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(
|
||||
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`
|
||||
);
|
||||
if (isInTestMode()) {
|
||||
throw e;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
@@ -94746,7 +94891,7 @@ LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) {
|
||||
};
|
||||
LongPrototype.gte = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.ge = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.compare = function compare(other) {
|
||||
LongPrototype.compare = function compare2(other) {
|
||||
if (!isLong(other)) other = fromValue(other);
|
||||
if (this.eq(other)) return 0;
|
||||
var thisNeg = this.isNegative(), otherNeg = other.isNegative();
|
||||
@@ -95297,7 +95442,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
|
||||
// src/init.ts
|
||||
var toolrunner4 = __toESM(require_toolrunner());
|
||||
var io6 = __toESM(require_io());
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
logger.startGroup("Setup CodeQL tools");
|
||||
const {
|
||||
codeql,
|
||||
@@ -95311,6 +95456,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
true
|
||||
);
|
||||
@@ -95457,6 +95603,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
|
||||
tempDir,
|
||||
gitHubVersion.type,
|
||||
codeQLDefaultVersionInfo,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
codeQL = initCodeQLResult.codeql;
|
||||
@@ -95512,23 +95659,23 @@ function getAutomationID2(category, analysis_key, environment) {
}
return computeAutomationID(analysis_key, environment);
}
async function uploadPayload(payload, repositoryNwo, logger, target) {
async function uploadPayload(payload, repositoryNwo, logger, analysis) {
logger.info("Uploading results");
if (isInTestMode()) {
if (shouldSkipSarifUpload()) {
const payloadSaveFile = path18.join(
getTemporaryDirectory(),
"payload.json"
`payload-${analysis.kind}.json`
);
logger.info(
`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs18.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "test-mode-sarif-id";
return "dummy-sarif-id";
}
const client = getApiClient();
try {
const response = await client.request(target, {
const response = await client.request(analysis.target, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload
@@ -95699,6 +95846,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -95761,7 +95909,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
payload,
|
||||
getRepositoryNwo(),
|
||||
logger,
|
||||
uploadTarget.target
|
||||
uploadTarget
|
||||
);
|
||||
logger.endGroup();
|
||||
return {
|
||||
@@ -95941,7 +96089,7 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
}
|
||||
|
||||
// src/analyze-action.ts
|
||||
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, logger) {
|
||||
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, dependencyCacheResults, logger) {
|
||||
const status = getActionsStatus(error2, stats?.analyze_failure_language);
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"finish" /* Analyze */,
|
||||
@@ -95958,7 +96106,8 @@ async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUplo
|
||||
...statusReportBase,
|
||||
...stats || {},
|
||||
...dbCreationTimings || {},
|
||||
...trapCacheCleanup || {}
|
||||
...trapCacheCleanup || {},
|
||||
dependency_caching_upload_results: dependencyCacheResults
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport = {
|
||||
@@ -96039,6 +96188,7 @@ async function run() {
|
||||
let trapCacheUploadTime = void 0;
|
||||
let dbCreationTimings = void 0;
|
||||
let didUploadTrapCaches = false;
|
||||
let dependencyCacheResults;
|
||||
initializeEnvironment(getActionVersion());
|
||||
persistInputs();
|
||||
const logger = getActionsLogger();
|
||||
@@ -96146,16 +96296,14 @@ async function run() {
|
||||
core14.setOutput("sarif-id", uploadResult.sarifID);
|
||||
}
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = CodeQuality;
|
||||
const qualityUploadResult = await uploadFiles(
|
||||
outputDir,
|
||||
getRequiredInput("checkout_path"),
|
||||
fixCodeQualityCategory(
|
||||
logger,
|
||||
getOptionalInput("category")
|
||||
),
|
||||
getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
CodeQuality
|
||||
analysis
|
||||
);
|
||||
core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
@@ -96177,7 +96325,11 @@ async function run() {
|
||||
"java_minimize_dependency_jars" /* JavaMinimizeDependencyJars */,
|
||||
codeql
|
||||
);
|
||||
await uploadDependencyCaches(config, logger, minimizeJavaJars);
|
||||
dependencyCacheResults = await uploadDependencyCaches(
|
||||
config,
|
||||
logger,
|
||||
minimizeJavaJars
|
||||
);
|
||||
}
|
||||
if (isInTestMode()) {
|
||||
logger.debug("In test mode. Waiting for processing is disabled.");
|
||||
@@ -96208,6 +96360,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
return;
|
||||
@@ -96225,6 +96378,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
} else if (runStats) {
|
||||
@@ -96237,6 +96391,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
} else {
|
||||
@@ -96249,6 +96404,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
125
lib/autobuild-action.js
generated
@@ -24680,6 +24680,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -24786,7 +24789,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -25121,8 +25142,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare;
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -25130,8 +25151,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var rcompare = (a, b, loose) => compare(b, a, loose);
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -25140,8 +25161,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var compareLoose = (a, b) => compare(a, b, true);
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -25184,8 +25205,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gt = (a, b, loose) => compare(a, b, loose) > 0;
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -25194,8 +25215,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lt = (a, b, loose) => compare(a, b, loose) < 0;
|
||||
var compare2 = require_compare();
|
||||
var lt = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
module2.exports = lt;
|
||||
}
|
||||
});
|
||||
@@ -25204,8 +25225,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var eq = (a, b, loose) => compare(a, b, loose) === 0;
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -25214,8 +25235,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -25224,8 +25245,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gte5 = (a, b, loose) => compare(a, b, loose) >= 0;
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -25234,8 +25255,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -25547,6 +25568,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug3("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug3("caret", comp);
|
||||
@@ -26131,12 +26153,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare(a, b, options));
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -26184,7 +26206,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -26244,7 +26266,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare(gt.semver, lt.semver, options);
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -26324,14 +26346,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -26355,7 +26377,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -26393,7 +26415,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare,
|
||||
compare: compare2,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -26438,7 +26460,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -26473,6 +26495,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -26486,14 +26509,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -26504,7 +26527,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -26517,7 +26540,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -28542,7 +28565,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -30261,13 +30284,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare;
|
||||
function compare(a, b, loose) {
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare(a, b, true);
|
||||
return compare2(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -30277,7 +30300,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare(b, a, loose);
|
||||
return compare2(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -30293,27 +30316,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare(a, b, loose) > 0;
|
||||
return compare2(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare(a, b, loose) < 0;
|
||||
return compare2(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare(a, b, loose) === 0;
|
||||
return compare2(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare(a, b, loose) !== 0;
|
||||
return compare2(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare(a, b, loose) >= 0;
|
||||
return compare2(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare(a, b, loose) <= 0;
|
||||
return compare2(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -78295,8 +78318,8 @@ var path3 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs2 = __toESM(require("fs"));
|
||||
@@ -78543,6 +78566,11 @@ function isSupportedToolsFeature(versionInfo, feature) {
|
||||
var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
|
||||
var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -79812,7 +79840,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -79835,7 +79863,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(`Could not determine the workflow event name: ${e}.`);
|
||||
}
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
if (diskInfo) {
|
||||
statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes;
|
||||
@@ -79872,6 +79900,9 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(
|
||||
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`
|
||||
);
|
||||
if (isInTestMode()) {
|
||||
throw e;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.23.1",
"cliVersion": "2.23.1",
"priorBundleVersion": "codeql-bundle-v2.23.0",
"priorCliVersion": "2.23.0"
"bundleVersion": "codeql-bundle-v2.23.2",
"cliVersion": "2.23.2",
"priorBundleVersion": "codeql-bundle-v2.23.1",
"priorCliVersion": "2.23.1"
}

399
lib/init-action-post.js
generated
File diff suppressed because it is too large
313
lib/init-action.js
generated
@@ -19971,6 +19971,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -20077,7 +20080,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -20412,8 +20433,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare;
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -20421,8 +20442,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var rcompare = (a, b, loose) => compare(b, a, loose);
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -20431,8 +20452,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var compareLoose = (a, b) => compare(a, b, true);
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -20475,8 +20496,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gt = (a, b, loose) => compare(a, b, loose) > 0;
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -20485,8 +20506,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lt2 = (a, b, loose) => compare(a, b, loose) < 0;
|
||||
var compare2 = require_compare();
|
||||
var lt2 = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
module2.exports = lt2;
|
||||
}
|
||||
});
|
||||
@@ -20495,8 +20516,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var eq = (a, b, loose) => compare(a, b, loose) === 0;
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -20505,8 +20526,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -20515,8 +20536,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gte5 = (a, b, loose) => compare(a, b, loose) >= 0;
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -20525,8 +20546,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -20838,6 +20859,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug3("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug3("caret", comp);
|
||||
@@ -21422,12 +21444,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare(a, b, options));
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -21475,7 +21497,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -21535,7 +21557,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt2) {
|
||||
gtltComp = compare(gt.semver, lt2.semver, options);
|
||||
gtltComp = compare2(gt.semver, lt2.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt2.operator !== "<=")) {
|
||||
@@ -21615,14 +21637,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -21646,7 +21668,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -21684,7 +21706,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare,
|
||||
compare: compare2,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -22510,14 +22532,14 @@ var require_dist_node4 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -22609,7 +22631,7 @@ var require_dist_node5 = __commonJS({
|
||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||
}
|
||||
var import_request_error = require_dist_node4();
|
||||
var import_request_error2 = require_dist_node4();
|
||||
function getBufferResponse(response) {
|
||||
return response.arrayBuffer();
|
||||
}
|
||||
@@ -22661,7 +22683,7 @@ var require_dist_node5 = __commonJS({
|
||||
if (status < 400) {
|
||||
return;
|
||||
}
|
||||
throw new import_request_error.RequestError(response.statusText, status, {
|
||||
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -22672,7 +22694,7 @@ var require_dist_node5 = __commonJS({
|
||||
});
|
||||
}
|
||||
if (status === 304) {
|
||||
throw new import_request_error.RequestError("Not modified", status, {
|
||||
throw new import_request_error2.RequestError("Not modified", status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -22684,7 +22706,7 @@ var require_dist_node5 = __commonJS({
|
||||
}
|
||||
if (status >= 400) {
|
||||
const data = await getResponseData(response);
|
||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
||||
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -22704,7 +22726,7 @@ var require_dist_node5 = __commonJS({
|
||||
data
|
||||
};
|
||||
}).catch((error2) => {
|
||||
if (error2 instanceof import_request_error.RequestError)
|
||||
if (error2 instanceof import_request_error2.RequestError)
|
||||
throw error2;
|
||||
else if (error2.name === "AbortError")
|
||||
throw error2;
|
||||
@@ -22716,7 +22738,7 @@ var require_dist_node5 = __commonJS({
|
||||
message = error2.cause;
|
||||
}
|
||||
}
|
||||
throw new import_request_error.RequestError(message, 500, {
|
||||
throw new import_request_error2.RequestError(message, 500, {
|
||||
request: requestOptions
|
||||
});
|
||||
});
|
||||
@@ -23158,14 +23180,14 @@ var require_dist_node7 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -23257,7 +23279,7 @@ var require_dist_node8 = __commonJS({
|
||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||
}
|
||||
var import_request_error = require_dist_node7();
|
||||
var import_request_error2 = require_dist_node7();
|
||||
function getBufferResponse(response) {
|
||||
return response.arrayBuffer();
|
||||
}
|
||||
@@ -23309,7 +23331,7 @@ var require_dist_node8 = __commonJS({
|
||||
if (status < 400) {
|
||||
return;
|
||||
}
|
||||
throw new import_request_error.RequestError(response.statusText, status, {
|
||||
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -23320,7 +23342,7 @@ var require_dist_node8 = __commonJS({
|
||||
});
|
||||
}
|
||||
if (status === 304) {
|
||||
throw new import_request_error.RequestError("Not modified", status, {
|
||||
throw new import_request_error2.RequestError("Not modified", status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -23332,7 +23354,7 @@ var require_dist_node8 = __commonJS({
|
||||
}
|
||||
if (status >= 400) {
|
||||
const data = await getResponseData(response);
|
||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
||||
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||
response: {
|
||||
url,
|
||||
status,
|
||||
@@ -23352,7 +23374,7 @@ var require_dist_node8 = __commonJS({
|
||||
data
|
||||
};
|
||||
}).catch((error2) => {
|
||||
if (error2 instanceof import_request_error.RequestError)
|
||||
if (error2 instanceof import_request_error2.RequestError)
|
||||
throw error2;
|
||||
else if (error2.name === "AbortError")
|
||||
throw error2;
|
||||
@@ -23364,7 +23386,7 @@ var require_dist_node8 = __commonJS({
|
||||
message = error2.cause;
|
||||
}
|
||||
}
|
||||
throw new import_request_error.RequestError(message, 500, {
|
||||
throw new import_request_error2.RequestError(message, 500, {
|
||||
request: requestOptions
|
||||
});
|
||||
});
|
||||
@@ -26947,7 +26969,7 @@ var require_to_regex_range = __commonJS({
|
||||
stop = countZeros(max + 1, zeros) - 1;
|
||||
}
|
||||
stops = [...stops];
|
||||
stops.sort(compare);
|
||||
stops.sort(compare2);
|
||||
return stops;
|
||||
}
|
||||
function rangeToPattern(start, stop, options) {
|
||||
@@ -27019,7 +27041,7 @@ var require_to_regex_range = __commonJS({
|
||||
for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
|
||||
return arr;
|
||||
}
|
||||
function compare(a, b) {
|
||||
function compare2(a, b) {
|
||||
return a > b ? 1 : b > a ? -1 : 0;
|
||||
}
|
||||
function contains(arr, key, val2) {
|
||||
@@ -32287,7 +32309,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -32322,6 +32344,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -32335,14 +32358,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -32353,7 +32376,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -32366,7 +32389,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -33745,14 +33768,14 @@ var require_dist_node14 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -33854,7 +33877,7 @@ var require_dist_node15 = __commonJS({
|
||||
throw error2;
|
||||
}
|
||||
var import_light = __toESM2(require_light());
|
||||
var import_request_error = require_dist_node14();
|
||||
var import_request_error2 = require_dist_node14();
|
||||
async function wrapRequest(state, octokit, request, options) {
|
||||
const limiter = new import_light.default();
|
||||
limiter.on("failed", function(error2, info4) {
|
||||
@@ -33875,7 +33898,7 @@ var require_dist_node15 = __commonJS({
|
||||
if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
|
||||
response.data.errors[0].message
|
||||
)) {
|
||||
const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
|
||||
const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
|
||||
request: options,
|
||||
response
|
||||
});
|
||||
@@ -34391,7 +34414,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -36110,13 +36133,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare;
|
||||
function compare(a, b, loose) {
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare(a, b, true);
|
||||
return compare2(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -36126,7 +36149,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare(b, a, loose);
|
||||
return compare2(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -36142,27 +36165,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare(a, b, loose) > 0;
|
||||
return compare2(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt2;
|
||||
function lt2(a, b, loose) {
|
||||
return compare(a, b, loose) < 0;
|
||||
return compare2(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare(a, b, loose) === 0;
|
||||
return compare2(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare(a, b, loose) !== 0;
|
||||
return compare2(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare(a, b, loose) >= 0;
|
||||
return compare2(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare(a, b, loose) <= 0;
|
||||
return compare2(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -85943,9 +85966,12 @@ var getFileType = async (filePath) => {
|
||||
function isSelfHostedRunner() {
|
||||
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
|
||||
}
|
||||
function isDefaultSetup() {
|
||||
function isDynamicWorkflow() {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
function isDefaultSetup() {
|
||||
return isDynamicWorkflow();
|
||||
}
|
||||
function prettyPrintInvocation(cmd, args) {
|
||||
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
|
||||
}
|
||||
@@ -86563,8 +86589,8 @@ var path9 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var crypto = __toESM(require("crypto"));
|
||||
@@ -86977,6 +87003,11 @@ var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
|
||||
var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
|
||||
var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0";
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -87860,7 +87891,13 @@ async function getOverlayDatabaseMode(codeql, repository, features, languages, s
|
||||
return nonOverlayAnalysis;
|
||||
}
|
||||
if (buildMode !== "none" /* None */ && (await Promise.all(
|
||||
languages.map(async (l) => await codeql.isTracedLanguage(l))
|
||||
languages.map(
|
||||
async (l) => l !== "go" /* go */ && // Workaround to allow overlay analysis for Go with any build
|
||||
// mode, since it does not yet support BMN. The Go autobuilder and/or extractor will
|
||||
// ensure that overlay-base databases are only created for supported Go build setups,
|
||||
// and that we'll fall back to full databases in other cases.
|
||||
await codeql.isTracedLanguage(l)
|
||||
)
|
||||
)).some(Boolean)) {
|
||||
logger.warning(
|
||||
`Cannot build an ${overlayDatabaseMode} database because build-mode is set to "${buildMode}" instead of "none". Falling back to creating a normal full database instead.`
|
||||
@@ -88184,7 +88221,7 @@ async function makeGlobber(patterns) {
|
||||
return glob.create(patterns.join("\n"));
|
||||
}
|
||||
async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
const restoredCaches = [];
|
||||
const status = [];
|
||||
for (const language of languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
if (cacheConfig === void 0) {
|
||||
@@ -88195,6 +88232,7 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
}
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, hit_kind: "no-hash" /* NoHash */ });
|
||||
logger.info(
|
||||
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
|
||||
);
|
||||
@@ -88209,19 +88247,23 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
", "
|
||||
)}`
|
||||
);
|
||||
const start = performance.now();
|
||||
const hitKey = await actionsCache3.restoreCache(
|
||||
cacheConfig.paths,
|
||||
primaryKey,
|
||||
restoreKeys
|
||||
);
|
||||
const download_duration_ms = Math.round(performance.now() - start);
|
||||
if (hitKey !== void 0) {
|
||||
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
|
||||
restoredCaches.push(language);
|
||||
const hit_kind = hitKey === primaryKey ? "exact" /* Exact */ : "partial" /* Partial */;
|
||||
status.push({ language, hit_kind, download_duration_ms });
|
||||
} else {
|
||||
status.push({ language, hit_kind: "miss" /* Miss */ });
|
||||
logger.info(`No suitable cache found for ${language}.`);
|
||||
}
|
||||
}
|
||||
return restoredCaches;
|
||||
return status;
|
||||
}
|
||||
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
|
||||
const hash = await glob.hashFiles(cacheConfig.hash.join("\n"));
|
||||
@@ -88319,6 +88361,45 @@ var path16 = __toESM(require("path"));
|
||||
var core10 = __toESM(require_core());
|
||||
var toolrunner3 = __toESM(require_toolrunner());
|
||||
|
||||
// node_modules/@octokit/request-error/dist-src/index.js
|
||||
var RequestError = class extends Error {
|
||||
name;
|
||||
/**
|
||||
* http status code
|
||||
*/
|
||||
status;
|
||||
/**
|
||||
* Request options that lead to the error.
|
||||
*/
|
||||
request;
|
||||
/**
|
||||
* Response object if a response was received
|
||||
*/
|
||||
response;
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
this.name = "HttpError";
|
||||
this.status = Number.parseInt(statusCode);
|
||||
if (Number.isNaN(this.status)) {
|
||||
this.status = 0;
|
||||
}
|
||||
if ("response" in options) {
|
||||
this.response = options.response;
|
||||
}
|
||||
const requestCopy = Object.assign({}, options.request);
|
||||
if (options.request.headers.authorization) {
|
||||
requestCopy.headers = Object.assign({}, options.request.headers, {
|
||||
authorization: options.request.headers.authorization.replace(
|
||||
/(?<! ) .*$/,
|
||||
" [REDACTED]"
|
||||
)
|
||||
});
|
||||
}
|
||||
requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
|
||||
this.request = requestCopy;
|
||||
}
|
||||
};
|
||||
|
||||
// src/cli-errors.ts
|
||||
var SUPPORTED_PLATFORMS = [
|
||||
["linux", "x64"],
|
||||
@@ -88881,6 +88962,7 @@ var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
var CODEQL_TOOLCACHE_INPUT = "toolcache";
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -89022,7 +89104,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
|
||||
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
|
||||
const compressionMethod2 = inferCompressionMethod(toolsInput);
|
||||
@@ -89059,6 +89141,40 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
|
||||
"`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required."
|
||||
);
|
||||
}
|
||||
} else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) {
|
||||
let latestToolcacheVersion;
|
||||
const allowToolcacheValueFF = await features.getValue(
|
||||
"allow_toolcache_input" /* AllowToolcacheInput */
|
||||
);
|
||||
const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode());
|
||||
if (allowToolcacheValue) {
|
||||
logger.info(
|
||||
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.`
|
||||
);
|
||||
latestToolcacheVersion = getLatestToolcacheVersion(logger);
|
||||
if (latestToolcacheVersion) {
|
||||
cliVersion2 = latestToolcacheVersion;
|
||||
}
|
||||
}
|
||||
if (latestToolcacheVersion === void 0) {
|
||||
if (allowToolcacheValue) {
|
||||
logger.info(
|
||||
`Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...`
|
||||
);
|
||||
} else {
|
||||
if (allowToolcacheValueFF) {
|
||||
logger.warning(
|
||||
`Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.`
|
||||
);
|
||||
} else {
|
||||
logger.info(
|
||||
`Ignoring 'tools: ${toolsInput}' because the feature is not enabled.`
|
||||
);
|
||||
}
|
||||
}
|
||||
cliVersion2 = defaultCliVersion.cliVersion;
|
||||
tagName = defaultCliVersion.tagName;
|
||||
}
|
||||
} else if (toolsInput !== void 0) {
|
||||
tagName = tryGetTagNameFromUrl(toolsInput, logger);
|
||||
url = toolsInput;
|
||||
@@ -89267,7 +89383,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
|
||||
}
|
||||
return cliVersion2;
|
||||
}
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
if (!await isBinaryAccessible("tar", logger)) {
|
||||
throw new ConfigurationError(
|
||||
"Could not find tar in PATH, so unable to extract CodeQL bundle."
|
||||
@@ -89280,6 +89396,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
|
||||
apiDetails,
|
||||
variant,
|
||||
zstdAvailability.available,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
let codeqlFolder;
|
||||
@@ -89365,8 +89482,24 @@ async function getNightlyToolsUrl(logger) {
|
||||
);
|
||||
}
|
||||
}
|
||||
function getLatestToolcacheVersion(logger) {
|
||||
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver7.compare(b, a));
|
||||
logger.debug(
|
||||
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
|
||||
allVersions
|
||||
)}.`
|
||||
);
|
||||
if (allVersions.length > 0) {
|
||||
const latestToolcacheVersion = allVersions[0];
|
||||
logger.info(
|
||||
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`
|
||||
);
|
||||
return latestToolcacheVersion;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
function isReservedToolsValue(tools) {
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT;
|
||||
}
|
||||
|
||||
// src/tracer-config.ts
|
||||
@@ -89410,7 +89543,7 @@ var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
|
||||
var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
|
||||
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
|
||||
var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
|
||||
try {
|
||||
const {
|
||||
codeqlFolder,
|
||||
@@ -89424,6 +89557,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
logger.debug(
|
||||
@@ -89448,7 +89582,8 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
zstdAvailability
|
||||
};
|
||||
} catch (e) {
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
|
||||
e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
|
||||
throw new ErrorClass(
|
||||
`Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
|
||||
|
||||
@@ -89996,7 +90131,7 @@ async function getJobRunUuidSarifOptions(codeql) {
|
||||
}
|
||||
|
||||
// src/init.ts
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
logger.startGroup("Setup CodeQL tools");
|
||||
const {
|
||||
codeql,
|
||||
@@ -90010,6 +90145,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
true
|
||||
);
|
||||
@@ -90209,7 +90345,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -90232,7 +90368,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(`Could not determine the workflow event name: ${e}.`);
|
||||
}
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
if (diskInfo) {
|
||||
statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes;
|
||||
@@ -90269,6 +90405,9 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(
|
||||
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`
|
||||
);
|
||||
if (isInTestMode()) {
|
||||
throw e;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
@@ -90324,7 +90463,7 @@ async function sendStatusReport(statusReport) {
|
||||
);
|
||||
}
|
||||
}
|
||||
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats) {
|
||||
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats, dependencyCachingResults) {
|
||||
const languages = config.languages.join(",");
|
||||
const paths = (config.originalUserInput.paths || []).join(",");
|
||||
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(
|
||||
@@ -90361,6 +90500,7 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config
|
||||
trap_cache_download_duration_ms: Math.round(config.trapCacheDownloadTime),
|
||||
overlay_base_database_download_size_bytes: overlayBaseDatabaseStats?.databaseSizeBytes,
|
||||
overlay_base_database_download_duration_ms: overlayBaseDatabaseStats?.databaseDownloadDurationMs,
|
||||
dependency_caching_restore_results: dependencyCachingResults,
|
||||
query_filters: JSON.stringify(
|
||||
config.originalUserInput["query-filters"] ?? []
|
||||
),
|
||||
@@ -90543,7 +90683,7 @@ async function getWorkflowAbsolutePath(logger) {
|
||||
}
|
||||
|
||||
// src/init-action.ts
|
||||
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, logger, error2) {
|
||||
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, dependencyCachingResults, logger, error2) {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"init" /* Init */,
|
||||
getActionsStatus(error2),
|
||||
@@ -90580,7 +90720,8 @@ async function sendCompletedStatusReport(startedAt, config, configFile, toolsDow
|
||||
Math.round(
|
||||
await getTotalCacheSize(Object.values(config.trapCaches), logger)
|
||||
),
|
||||
overlayBaseDatabaseStats
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults
|
||||
);
|
||||
await sendStatusReport({
|
||||
...initWithConfigStatusReport,
|
||||
@@ -90653,6 +90794,7 @@ async function run() {
|
||||
getTemporaryDirectory(),
|
||||
gitHubVersion.type,
|
||||
codeQLDefaultVersionInfo,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
codeql = initCodeQLResult.codeql;
|
||||
@@ -90744,6 +90886,7 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
let overlayBaseDatabaseStats;
|
||||
let dependencyCachingResults;
|
||||
try {
|
||||
if (config.overlayDatabaseMode === "overlay" /* Overlay */ && config.useOverlayDatabaseCaching) {
|
||||
overlayBaseDatabaseStats = await downloadOverlayBaseDatabaseFromCache(
|
||||
@@ -90888,7 +91031,7 @@ exec ${goBinaryPath} "$@"`
|
||||
codeql
|
||||
);
|
||||
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
|
||||
await downloadDependencyCaches(
|
||||
dependencyCachingResults = await downloadDependencyCaches(
|
||||
config.languages,
|
||||
logger,
|
||||
minimizeJavaJars
|
||||
@@ -90993,6 +91136,7 @@ exec ${goBinaryPath} "$@"`
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger,
|
||||
error2
|
||||
);
|
||||
@@ -91010,6 +91154,7 @@ exec ${goBinaryPath} "$@"`
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
||||
121 lib/resolve-environment-action.js generated
@@ -24680,6 +24680,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -24786,7 +24789,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -25121,8 +25142,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare;
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -25130,8 +25151,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var rcompare = (a, b, loose) => compare(b, a, loose);
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -25140,8 +25161,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var compareLoose = (a, b) => compare(a, b, true);
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -25184,8 +25205,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gt = (a, b, loose) => compare(a, b, loose) > 0;
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -25194,8 +25215,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lt = (a, b, loose) => compare(a, b, loose) < 0;
|
||||
var compare2 = require_compare();
|
||||
var lt = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
module2.exports = lt;
|
||||
}
|
||||
});
|
||||
@@ -25204,8 +25225,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var eq = (a, b, loose) => compare(a, b, loose) === 0;
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -25214,8 +25235,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -25224,8 +25245,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gte5 = (a, b, loose) => compare(a, b, loose) >= 0;
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -25234,8 +25255,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -25547,6 +25568,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug3("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug3("caret", comp);
|
||||
@@ -26131,12 +26153,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare(a, b, options));
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -26184,7 +26206,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -26244,7 +26266,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare(gt.semver, lt.semver, options);
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -26324,14 +26346,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -26355,7 +26377,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -26393,7 +26415,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare,
|
||||
compare: compare2,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -26438,7 +26460,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -26473,6 +26495,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -26486,14 +26509,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -26504,7 +26527,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -26517,7 +26540,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -28542,7 +28565,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -30261,13 +30284,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare;
|
||||
function compare(a, b, loose) {
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare(a, b, true);
|
||||
return compare2(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -30277,7 +30300,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare(b, a, loose);
|
||||
return compare2(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -30293,27 +30316,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare(a, b, loose) > 0;
|
||||
return compare2(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare(a, b, loose) < 0;
|
||||
return compare2(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare(a, b, loose) === 0;
|
||||
return compare2(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare(a, b, loose) !== 0;
|
||||
return compare2(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare(a, b, loose) >= 0;
|
||||
return compare2(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare(a, b, loose) <= 0;
|
||||
return compare2(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -78534,6 +78557,11 @@ function isSupportedToolsFeature(versionInfo, feature) {
|
||||
|
||||
// src/feature-flags.ts
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -79439,7 +79467,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -79462,7 +79490,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(`Could not determine the workflow event name: ${e}.`);
|
||||
}
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
if (diskInfo) {
|
||||
statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes;
|
||||
@@ -79499,6 +79527,9 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
|
||||
logger.warning(
|
||||
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`
|
||||
);
|
||||
if (isInTestMode()) {
|
||||
throw e;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
}
|
||||
|
||||
126 lib/start-proxy-action-post.js generated
@@ -24680,6 +24680,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -24786,7 +24789,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -25121,8 +25142,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare;
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -25130,8 +25151,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var rcompare = (a, b, loose) => compare(b, a, loose);
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -25140,8 +25161,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var compareLoose = (a, b) => compare(a, b, true);
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -25184,8 +25205,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gt = (a, b, loose) => compare(a, b, loose) > 0;
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -25194,8 +25215,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lt = (a, b, loose) => compare(a, b, loose) < 0;
|
||||
var compare2 = require_compare();
|
||||
var lt = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
module2.exports = lt;
|
||||
}
|
||||
});
|
||||
@@ -25204,8 +25225,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var eq = (a, b, loose) => compare(a, b, loose) === 0;
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -25214,8 +25235,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -25224,8 +25245,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gte5 = (a, b, loose) => compare(a, b, loose) >= 0;
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -25234,8 +25255,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -25547,6 +25568,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug2("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug2("caret", comp);
|
||||
@@ -26131,12 +26153,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare(a, b, options));
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -26184,7 +26206,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -26244,7 +26266,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare(gt.semver, lt.semver, options);
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -26324,14 +26346,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -26355,7 +26377,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -26393,7 +26415,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare,
|
||||
compare: compare2,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -26438,7 +26460,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -26473,6 +26495,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -26486,14 +26509,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -26504,7 +26527,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -26517,7 +26540,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -28542,7 +28565,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -30261,13 +30284,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare;
|
||||
function compare(a, b, loose) {
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare(a, b, true);
|
||||
return compare2(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -30277,7 +30300,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare(b, a, loose);
|
||||
return compare2(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -30293,27 +30316,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare(a, b, loose) > 0;
|
||||
return compare2(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare(a, b, loose) < 0;
|
||||
return compare2(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare(a, b, loose) === 0;
|
||||
return compare2(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare(a, b, loose) !== 0;
|
||||
return compare2(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare(a, b, loose) >= 0;
|
||||
return compare2(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare(a, b, loose) <= 0;
|
||||
return compare2(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -94483,8 +94506,8 @@ var require_commonjs16 = __commonJS({
|
||||
if (rootPath === this.root.name) {
|
||||
return this.root;
|
||||
}
|
||||
for (const [compare, root] of Object.entries(this.roots)) {
|
||||
if (this.sameRoot(rootPath, compare)) {
|
||||
for (const [compare2, root] of Object.entries(this.roots)) {
|
||||
if (this.sameRoot(rootPath, compare2)) {
|
||||
return this.roots[rootPath] = root;
|
||||
}
|
||||
}
|
||||
@@ -94493,9 +94516,9 @@ var require_commonjs16 = __commonJS({
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
sameRoot(rootPath, compare = this.root.name) {
|
||||
sameRoot(rootPath, compare2 = this.root.name) {
|
||||
rootPath = rootPath.toUpperCase().replace(/\//g, "\\").replace(uncDriveRegexp, "$1\\");
|
||||
return rootPath === compare;
|
||||
return rootPath === compare2;
|
||||
}
|
||||
};
|
||||
exports2.PathWin32 = PathWin32;
|
||||
@@ -98514,7 +98537,7 @@ var require_b4a = __commonJS({
|
||||
function byteLength(string, encoding) {
|
||||
return Buffer.byteLength(string, encoding);
|
||||
}
|
||||
function compare(a, b) {
|
||||
function compare2(a, b) {
|
||||
return Buffer.compare(a, b);
|
||||
}
|
||||
function concat(buffers, totalLength) {
|
||||
@@ -98615,7 +98638,7 @@ var require_b4a = __commonJS({
|
||||
allocUnsafe,
|
||||
allocUnsafeSlow,
|
||||
byteLength,
|
||||
compare,
|
||||
compare: compare2,
|
||||
concat,
|
||||
copy,
|
||||
equals,
|
||||
@@ -117214,6 +117237,11 @@ var semver3 = __toESM(require_semver2());
|
||||
|
||||
// src/feature-flags.ts
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
|
||||
47584
lib/start-proxy-action.js
generated
File diff suppressed because it is too large
410
lib/upload-lib.js
generated
@@ -21899,14 +21899,14 @@ var require_dist_node4 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -21998,7 +21998,7 @@ var require_dist_node5 = __commonJS({
|
||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||
}
|
||||
var import_request_error = require_dist_node4();
|
||||
var import_request_error2 = require_dist_node4();
|
||||
function getBufferResponse(response) {
|
||||
return response.arrayBuffer();
|
||||
}
|
||||
@@ -22050,7 +22050,7 @@ var require_dist_node5 = __commonJS({
|
||||
if (status < 400) {
|
||||
return;
|
||||
}
|
||||
throw new import_request_error.RequestError(response.statusText, status, {
|
||||
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22061,7 +22061,7 @@ var require_dist_node5 = __commonJS({
|
||||
});
|
||||
}
|
||||
if (status === 304) {
|
||||
throw new import_request_error.RequestError("Not modified", status, {
|
||||
throw new import_request_error2.RequestError("Not modified", status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22073,7 +22073,7 @@ var require_dist_node5 = __commonJS({
|
||||
}
|
||||
if (status >= 400) {
|
||||
const data = await getResponseData(response);
|
||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
||||
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22093,7 +22093,7 @@ var require_dist_node5 = __commonJS({
|
||||
data
|
||||
};
|
||||
}).catch((error2) => {
|
||||
if (error2 instanceof import_request_error.RequestError)
|
||||
if (error2 instanceof import_request_error2.RequestError)
|
||||
throw error2;
|
||||
else if (error2.name === "AbortError")
|
||||
throw error2;
|
||||
@@ -22105,7 +22105,7 @@ var require_dist_node5 = __commonJS({
|
||||
message = error2.cause;
|
||||
}
|
||||
}
|
||||
throw new import_request_error.RequestError(message, 500, {
|
||||
throw new import_request_error2.RequestError(message, 500, {
|
||||
request: requestOptions
|
||||
});
|
||||
});
|
||||
@@ -22547,14 +22547,14 @@ var require_dist_node7 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -22646,7 +22646,7 @@ var require_dist_node8 = __commonJS({
|
||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||
}
|
||||
var import_request_error = require_dist_node7();
|
||||
var import_request_error2 = require_dist_node7();
|
||||
function getBufferResponse(response) {
|
||||
return response.arrayBuffer();
|
||||
}
|
||||
@@ -22698,7 +22698,7 @@ var require_dist_node8 = __commonJS({
|
||||
if (status < 400) {
|
||||
return;
|
||||
}
|
||||
throw new import_request_error.RequestError(response.statusText, status, {
|
||||
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22709,7 +22709,7 @@ var require_dist_node8 = __commonJS({
|
||||
});
|
||||
}
|
||||
if (status === 304) {
|
||||
throw new import_request_error.RequestError("Not modified", status, {
|
||||
throw new import_request_error2.RequestError("Not modified", status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22721,7 +22721,7 @@ var require_dist_node8 = __commonJS({
|
||||
}
|
||||
if (status >= 400) {
|
||||
const data = await getResponseData(response);
|
||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
||||
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||
response: {
|
||||
url: url2,
|
||||
status,
|
||||
@@ -22741,7 +22741,7 @@ var require_dist_node8 = __commonJS({
|
||||
data
|
||||
};
|
||||
}).catch((error2) => {
|
||||
if (error2 instanceof import_request_error.RequestError)
|
||||
if (error2 instanceof import_request_error2.RequestError)
|
||||
throw error2;
|
||||
else if (error2.name === "AbortError")
|
||||
throw error2;
|
||||
@@ -22753,7 +22753,7 @@ var require_dist_node8 = __commonJS({
|
||||
message = error2.cause;
|
||||
}
|
||||
}
|
||||
throw new import_request_error.RequestError(message, 500, {
|
||||
throw new import_request_error2.RequestError(message, 500, {
|
||||
request: requestOptions
|
||||
});
|
||||
});
|
||||
@@ -26336,7 +26336,7 @@ var require_to_regex_range = __commonJS({
|
||||
stop = countZeros(max + 1, zeros) - 1;
|
||||
}
|
||||
stops = [...stops];
|
||||
stops.sort(compare2);
|
||||
stops.sort(compare3);
|
||||
return stops;
|
||||
}
|
||||
function rangeToPattern(start, stop, options) {
|
||||
@@ -26408,7 +26408,7 @@ var require_to_regex_range = __commonJS({
|
||||
for (let i = 0; i < a.length; i++) arr.push([a[i], b[i]]);
|
||||
return arr;
|
||||
}
|
||||
function compare2(a, b) {
|
||||
function compare3(a, b) {
|
||||
return a > b ? 1 : b > a ? -1 : 0;
|
||||
}
|
||||
function contains(arr, key, val2) {
|
||||
@@ -29446,11 +29446,11 @@ var require_out = __commonJS({
|
||||
async.read(path15, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports2.stat = stat;
|
||||
function statSync2(path15, optionsOrSettings) {
|
||||
function statSync3(path15, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path15, settings);
|
||||
}
|
||||
exports2.statSync = statSync2;
|
||||
exports2.statSync = statSync3;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
@@ -31826,6 +31826,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -31932,7 +31935,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -32267,8 +32288,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
var compare3 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare3;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -32276,8 +32297,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
var compare3 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare3(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -32286,8 +32307,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
var compare3 = require_compare();
|
||||
var compareLoose = (a, b) => compare3(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -32330,8 +32351,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
var compare3 = require_compare();
|
||||
var gt = (a, b, loose) => compare3(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -32340,8 +32361,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var lt = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
var compare3 = require_compare();
|
||||
var lt = (a, b, loose) => compare3(a, b, loose) < 0;
|
||||
module2.exports = lt;
|
||||
}
|
||||
});
|
||||
@@ -32350,8 +32371,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
var compare3 = require_compare();
|
||||
var eq = (a, b, loose) => compare3(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -32360,8 +32381,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
var compare3 = require_compare();
|
||||
var neq = (a, b, loose) => compare3(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -32370,8 +32391,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
var compare3 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare3(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -32380,8 +32401,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
var compare3 = require_compare();
|
||||
var lte = (a, b, loose) => compare3(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -32693,6 +32714,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug2("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug2("caret", comp);
|
||||
@@ -33277,12 +33299,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare2 = require_compare();
|
||||
var compare3 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
const v = versions.sort((a, b) => compare3(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -33330,7 +33352,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare2 = require_compare();
|
||||
var compare3 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -33390,7 +33412,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
gtltComp = compare3(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -33470,14 +33492,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
const comp = compare3(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
const comp = compare3(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -33501,7 +33523,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare2 = require_compare();
|
||||
var compare3 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -33539,7 +33561,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare: compare2,
|
||||
compare: compare3,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -33584,7 +33606,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -33619,6 +33641,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -33632,14 +33655,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -33650,7 +33673,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -33663,7 +33686,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -35042,14 +35065,14 @@ var require_dist_node14 = __commonJS({
|
||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||
var dist_src_exports = {};
|
||||
__export2(dist_src_exports, {
|
||||
RequestError: () => RequestError
|
||||
RequestError: () => RequestError2
|
||||
});
|
||||
module2.exports = __toCommonJS2(dist_src_exports);
|
||||
var import_deprecation = require_dist_node3();
|
||||
var import_once = __toESM2(require_once());
|
||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||
var RequestError = class extends Error {
|
||||
var RequestError2 = class extends Error {
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
if (Error.captureStackTrace) {
|
||||
@@ -35151,7 +35174,7 @@ var require_dist_node15 = __commonJS({
|
||||
throw error2;
|
||||
}
|
||||
var import_light = __toESM2(require_light());
|
||||
var import_request_error = require_dist_node14();
|
||||
var import_request_error2 = require_dist_node14();
|
||||
async function wrapRequest(state, octokit, request, options) {
|
||||
const limiter = new import_light.default();
|
||||
limiter.on("failed", function(error2, info4) {
|
||||
@@ -35172,7 +35195,7 @@ var require_dist_node15 = __commonJS({
|
||||
if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
|
||||
response.data.errors[0].message
|
||||
)) {
|
||||
const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
|
||||
const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
|
||||
request: options,
|
||||
response
|
||||
});
|
||||
@@ -35688,7 +35711,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -37407,13 +37430,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
exports2.compare = compare3;
|
||||
function compare3(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare2(a, b, true);
|
||||
return compare3(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -37423,7 +37446,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare2(b, a, loose);
|
||||
return compare3(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -37439,27 +37462,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare2(a, b, loose) > 0;
|
||||
return compare3(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare2(a, b, loose) < 0;
|
||||
return compare3(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare2(a, b, loose) === 0;
|
||||
return compare3(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare2(a, b, loose) !== 0;
|
||||
return compare3(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare2(a, b, loose) >= 0;
|
||||
return compare3(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare2(a, b, loose) <= 0;
|
||||
return compare3(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -84821,6 +84844,7 @@ __export(upload_lib_exports, {
|
||||
InvalidSarifUploadError: () => InvalidSarifUploadError,
|
||||
buildPayload: () => buildPayload,
|
||||
findSarifFilesInDir: () => findSarifFilesInDir,
|
||||
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
|
||||
getSarifFilePaths: () => getSarifFilePaths,
|
||||
populateRunAutomationDetails: () => populateRunAutomationDetails,
|
||||
readSarifFile: () => readSarifFile,
|
||||
@@ -84829,6 +84853,7 @@ __export(upload_lib_exports, {
|
||||
shouldShowCombineSarifFilesDeprecationWarning: () => shouldShowCombineSarifFilesDeprecationWarning,
|
||||
throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled,
|
||||
uploadFiles: () => uploadFiles,
|
||||
uploadPayload: () => uploadPayload,
|
||||
uploadSpecifiedFiles: () => uploadSpecifiedFiles,
|
||||
validateSarifFileSchema: () => validateSarifFileSchema,
|
||||
validateUniqueCategory: () => validateUniqueCategory,
|
||||
@@ -88339,6 +88364,9 @@ function isGoodVersion(versionSpec) {
|
||||
function isInTestMode() {
|
||||
return process.env["CODEQL_ACTION_TEST_MODE" /* TEST_MODE */] === "true";
|
||||
}
|
||||
function shouldSkipSarifUpload() {
|
||||
return isInTestMode() || process.env["CODEQL_ACTION_SKIP_SARIF_UPLOAD" /* SKIP_SARIF_UPLOAD */] === "true";
|
||||
}
|
||||
function getTestingEnvironment() {
|
||||
const testingEnvironment = process.env["CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */] || "";
|
||||
if (testingEnvironment === "") {
|
||||
@@ -88473,6 +88501,12 @@ function getWorkflowRunAttempt() {
|
||||
}
|
||||
return workflowRunAttempt;
|
||||
}
|
||||
function isDynamicWorkflow() {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
function isDefaultSetup() {
|
||||
return isDynamicWorkflow();
|
||||
}
|
||||
function prettyPrintInvocation(cmd, args) {
|
||||
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
|
||||
}
|
||||
@@ -88529,6 +88563,57 @@ async function runTool(cmd, args = [], opts = {}) {
|
||||
}
|
||||
return stdout;
|
||||
}
|
||||
var qualityCategoryMapping = {
|
||||
"c#": "csharp",
|
||||
cpp: "c-cpp",
|
||||
c: "c-cpp",
|
||||
"c++": "c-cpp",
|
||||
java: "java-kotlin",
|
||||
javascript: "javascript-typescript",
|
||||
typescript: "javascript-typescript",
|
||||
kotlin: "java-kotlin"
|
||||
};
|
||||
function fixCodeQualityCategory(logger, category) {
|
||||
if (category !== void 0 && isDefaultSetup() && category.startsWith("/language:")) {
|
||||
const language = category.substring("/language:".length);
|
||||
const mappedLanguage = qualityCategoryMapping[language];
|
||||
if (mappedLanguage) {
|
||||
const newCategory = `/language:${mappedLanguage}`;
|
||||
logger.info(
|
||||
`Adjusted category for Code Quality from '${category}' to '${newCategory}'.`
|
||||
);
|
||||
return newCategory;
|
||||
}
|
||||
}
|
||||
return category;
|
||||
}
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
AnalysisKind2["CodeScanning"] = "code-scanning";
|
||||
AnalysisKind2["CodeQuality"] = "code-quality";
|
||||
return AnalysisKind2;
|
||||
})(AnalysisKind || {});
|
||||
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
var CodeScanning = {
|
||||
kind: "code-scanning" /* CodeScanning */,
|
||||
name: "code scanning",
|
||||
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
|
||||
sarifExtension: ".sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
|
||||
};
|
||||
var CodeQuality = {
|
||||
kind: "code-quality" /* CodeQuality */,
|
||||
name: "code quality",
|
||||
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
|
||||
};
|
||||
var SarifScanOrder = [CodeQuality, CodeScanning];
|
||||
|
||||
// src/api-client.ts
|
||||
var core5 = __toESM(require_core());
|
||||
@@ -88680,6 +88765,45 @@ var path12 = __toESM(require("path"));
|
||||
var core10 = __toESM(require_core());
|
||||
var toolrunner3 = __toESM(require_toolrunner());
|
||||
|
||||
// node_modules/@octokit/request-error/dist-src/index.js
|
||||
var RequestError = class extends Error {
|
||||
name;
|
||||
/**
|
||||
* http status code
|
||||
*/
|
||||
status;
|
||||
/**
|
||||
* Request options that lead to the error.
|
||||
*/
|
||||
request;
|
||||
/**
|
||||
* Response object if a response was received
|
||||
*/
|
||||
response;
|
||||
constructor(message, statusCode, options) {
|
||||
super(message);
|
||||
this.name = "HttpError";
|
||||
this.status = Number.parseInt(statusCode);
|
||||
if (Number.isNaN(this.status)) {
|
||||
this.status = 0;
|
||||
}
|
||||
if ("response" in options) {
|
||||
this.response = options.response;
|
||||
}
|
||||
const requestCopy = Object.assign({}, options.request);
|
||||
if (options.request.headers.authorization) {
|
||||
requestCopy.headers = Object.assign({}, options.request.headers, {
|
||||
authorization: options.request.headers.authorization.replace(
|
||||
/(?<! ) .*$/,
|
||||
" [REDACTED]"
|
||||
)
|
||||
});
|
||||
}
|
||||
requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
|
||||
this.request = requestCopy;
|
||||
}
|
||||
};
|
||||
|
||||
// src/cli-errors.ts
|
||||
var SUPPORTED_PLATFORMS = [
|
||||
["linux", "x64"],
|
||||
@@ -88921,14 +89045,6 @@ function wrapCliConfigurationError(cliError) {
|
||||
var fs7 = __toESM(require("fs"));
|
||||
var path9 = __toESM(require("path"));
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
AnalysisKind2["CodeScanning"] = "code-scanning";
|
||||
AnalysisKind2["CodeQuality"] = "code-quality";
|
||||
return AnalysisKind2;
|
||||
})(AnalysisKind || {});
|
||||
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
@@ -88949,8 +89065,8 @@ var path8 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs5 = __toESM(require("fs"));
|
||||
@@ -89238,6 +89354,11 @@ function isSupportedToolsFeature(versionInfo, feature) {
|
||||
// src/feature-flags.ts
|
||||
var CODEQL_VERSION_ZSTD_BUNDLE = "2.19.0";
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -89880,6 +90001,7 @@ var CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
|
||||
var CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
var CODEQL_BUNDLE_VERSION_ALIAS = ["linked", "latest"];
|
||||
var CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
var CODEQL_TOOLCACHE_INPUT = "toolcache";
|
||||
function getCodeQLBundleExtension(compressionMethod) {
|
||||
switch (compressionMethod) {
|
||||
case "gzip":
|
||||
@@ -90021,7 +90143,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, logger) {
|
||||
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
|
||||
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
|
||||
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
|
||||
const compressionMethod2 = inferCompressionMethod(toolsInput);
|
||||
@@ -90058,6 +90180,40 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
|
||||
"`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required."
|
||||
);
|
||||
}
|
||||
} else if (toolsInput !== void 0 && toolsInput === CODEQL_TOOLCACHE_INPUT) {
|
||||
let latestToolcacheVersion;
|
||||
const allowToolcacheValueFF = await features.getValue(
|
||||
"allow_toolcache_input" /* AllowToolcacheInput */
|
||||
);
|
||||
const allowToolcacheValue = allowToolcacheValueFF && (isDynamicWorkflow() || isInTestMode());
|
||||
if (allowToolcacheValue) {
|
||||
logger.info(
|
||||
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.`
|
||||
);
|
||||
latestToolcacheVersion = getLatestToolcacheVersion(logger);
|
||||
if (latestToolcacheVersion) {
|
||||
cliVersion2 = latestToolcacheVersion;
|
||||
}
|
||||
}
|
||||
if (latestToolcacheVersion === void 0) {
|
||||
if (allowToolcacheValue) {
|
||||
logger.info(
|
||||
`Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...`
|
||||
);
|
||||
} else {
|
||||
if (allowToolcacheValueFF) {
|
||||
logger.warning(
|
||||
`Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.`
|
||||
);
|
||||
} else {
|
||||
logger.info(
|
||||
`Ignoring 'tools: ${toolsInput}' because the feature is not enabled.`
|
||||
);
|
||||
}
|
||||
}
|
||||
cliVersion2 = defaultCliVersion.cliVersion;
|
||||
tagName = defaultCliVersion.tagName;
|
||||
}
|
||||
} else if (toolsInput !== void 0) {
|
||||
tagName = tryGetTagNameFromUrl(toolsInput, logger);
|
||||
url2 = toolsInput;
|
||||
@@ -90266,7 +90422,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
|
||||
}
|
||||
return cliVersion2;
|
||||
}
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
if (!await isBinaryAccessible("tar", logger)) {
|
||||
throw new ConfigurationError(
|
||||
"Could not find tar in PATH, so unable to extract CodeQL bundle."
|
||||
@@ -90279,6 +90435,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
|
||||
apiDetails,
|
||||
variant,
|
||||
zstdAvailability.available,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
let codeqlFolder;
|
||||
@@ -90364,8 +90521,24 @@ async function getNightlyToolsUrl(logger) {
|
||||
);
|
||||
}
|
||||
}
|
||||
function getLatestToolcacheVersion(logger) {
|
||||
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver7.compare(b, a));
|
||||
logger.debug(
|
||||
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
|
||||
allVersions
|
||||
)}.`
|
||||
);
|
||||
if (allVersions.length > 0) {
|
||||
const latestToolcacheVersion = allVersions[0];
|
||||
logger.info(
|
||||
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`
|
||||
);
|
||||
return latestToolcacheVersion;
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
function isReservedToolsValue(tools) {
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools);
|
||||
return CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) || CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) || tools === CODEQL_TOOLCACHE_INPUT;
|
||||
}
|
||||
|
||||
// src/tracer-config.ts
|
||||
@@ -90387,7 +90560,7 @@ var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
|
||||
var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
|
||||
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
|
||||
var CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
|
||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
|
||||
try {
|
||||
const {
|
||||
codeqlFolder,
|
||||
@@ -90401,6 +90574,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
logger.debug(
|
||||
@@ -90425,7 +90599,8 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
|
||||
zstdAvailability
|
||||
};
|
||||
} catch (e) {
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
|
||||
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
|
||||
e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
|
||||
throw new ErrorClass(
|
||||
`Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
|
||||
|
||||
@@ -91552,7 +91727,7 @@ LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) {
|
||||
};
|
||||
LongPrototype.gte = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.ge = LongPrototype.greaterThanOrEqual;
|
||||
LongPrototype.compare = function compare(other) {
|
||||
LongPrototype.compare = function compare2(other) {
|
||||
if (!isLong(other)) other = fromValue(other);
|
||||
if (this.eq(other)) return 0;
|
||||
var thisNeg = this.isNegative(), otherNeg = other.isNegative();
|
||||
@@ -92103,7 +92278,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
|
||||
// src/init.ts
|
||||
var toolrunner4 = __toESM(require_toolrunner());
|
||||
var io5 = __toESM(require_io());
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
|
||||
logger.startGroup("Setup CodeQL tools");
|
||||
const {
|
||||
codeql,
|
||||
@@ -92117,6 +92292,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
true
|
||||
);
|
||||
@@ -92263,6 +92439,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
|
||||
tempDir,
|
||||
gitHubVersion.type,
|
||||
codeQLDefaultVersionInfo,
|
||||
features,
|
||||
logger
|
||||
);
|
||||
codeQL = initCodeQLResult.codeql;
|
||||
@@ -92318,23 +92495,23 @@ function getAutomationID2(category, analysis_key, environment) {
|
||||
}
|
||||
return computeAutomationID(analysis_key, environment);
|
||||
}
|
||||
async function uploadPayload(payload, repositoryNwo, logger, target) {
|
||||
async function uploadPayload(payload, repositoryNwo, logger, analysis) {
|
||||
logger.info("Uploading results");
|
||||
if (isInTestMode()) {
|
||||
if (shouldSkipSarifUpload()) {
|
||||
const payloadSaveFile = path14.join(
|
||||
getTemporaryDirectory(),
|
||||
"payload.json"
|
||||
`payload-${analysis.kind}.json`
|
||||
);
|
||||
logger.info(
|
||||
`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`
|
||||
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
|
||||
);
|
||||
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
|
||||
fs13.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
|
||||
return "test-mode-sarif-id";
|
||||
return "dummy-sarif-id";
|
||||
}
|
||||
const client = getApiClient();
|
||||
try {
|
||||
const response = await client.request(target, {
|
||||
const response = await client.request(analysis.target, {
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
data: payload
|
||||
@@ -92391,6 +92568,54 @@ function getSarifFilePaths(sarifPath, isSarif) {
|
||||
}
|
||||
return sarifFiles;
|
||||
}
|
||||
async function getGroupedSarifFilePaths(logger, sarifPath) {
|
||||
const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
|
||||
if (stats === void 0) {
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
|
||||
}
|
||||
const results = {};
|
||||
if (stats.isDirectory()) {
|
||||
let unassignedSarifFiles = findSarifFilesInDir(
|
||||
sarifPath,
|
||||
(name) => path14.extname(name) === ".sarif"
|
||||
);
|
||||
logger.debug(
|
||||
`Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
const filesForCurrentAnalysis = unassignedSarifFiles.filter(
|
||||
analysisConfig.sarifPredicate
|
||||
);
|
||||
if (filesForCurrentAnalysis.length > 0) {
|
||||
logger.debug(
|
||||
`The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`
|
||||
);
|
||||
unassignedSarifFiles = unassignedSarifFiles.filter(
|
||||
(name) => !analysisConfig.sarifPredicate(name)
|
||||
);
|
||||
results[analysisConfig.kind] = filesForCurrentAnalysis;
|
||||
} else {
|
||||
logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
|
||||
}
|
||||
}
|
||||
if (unassignedSarifFiles.length !== 0) {
|
||||
logger.warning(
|
||||
`Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
if (analysisConfig.kind === "code-scanning" /* CodeScanning */ || analysisConfig.sarifPredicate(sarifPath)) {
|
||||
logger.debug(
|
||||
`Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`
|
||||
);
|
||||
results[analysisConfig.kind] = [sarifPath];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
function countResultsInSarif(sarif) {
|
||||
let numResults = 0;
|
||||
const parsedSarif = JSON.parse(sarif);
|
||||
@@ -92505,6 +92730,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -92567,7 +92793,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
payload,
|
||||
getRepositoryNwo(),
|
||||
logger,
|
||||
uploadTarget.target
|
||||
uploadTarget
|
||||
);
|
||||
logger.endGroup();
|
||||
return {
|
||||
@@ -92750,6 +92976,7 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
InvalidSarifUploadError,
|
||||
buildPayload,
|
||||
findSarifFilesInDir,
|
||||
getGroupedSarifFilePaths,
|
||||
getSarifFilePaths,
|
||||
populateRunAutomationDetails,
|
||||
readSarifFile,
|
||||
@@ -92758,6 +92985,7 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
shouldShowCombineSarifFilesDeprecationWarning,
|
||||
throwIfCombineSarifFilesDisabled,
|
||||
uploadFiles,
|
||||
uploadPayload,
|
||||
uploadSpecifiedFiles,
|
||||
validateSarifFileSchema,
|
||||
validateUniqueCategory,
|
||||
|
||||
128
lib/upload-sarif-action-post.js
generated
@@ -24680,6 +24680,9 @@ var require_identifiers = __commonJS({
|
||||
"use strict";
|
||||
var numeric = /^[0-9]+$/;
|
||||
var compareIdentifiers = (a, b) => {
|
||||
if (typeof a === "number" && typeof b === "number") {
|
||||
return a === b ? 0 : a < b ? -1 : 1;
|
||||
}
|
||||
const anum = numeric.test(a);
|
||||
const bnum = numeric.test(b);
|
||||
if (anum && bnum) {
|
||||
@@ -24786,7 +24789,25 @@ var require_semver = __commonJS({
|
||||
if (!(other instanceof _SemVer)) {
|
||||
other = new _SemVer(other, this.options);
|
||||
}
|
||||
return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch);
|
||||
if (this.major < other.major) {
|
||||
return -1;
|
||||
}
|
||||
if (this.major > other.major) {
|
||||
return 1;
|
||||
}
|
||||
if (this.minor < other.minor) {
|
||||
return -1;
|
||||
}
|
||||
if (this.minor > other.minor) {
|
||||
return 1;
|
||||
}
|
||||
if (this.patch < other.patch) {
|
||||
return -1;
|
||||
}
|
||||
if (this.patch > other.patch) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
comparePre(other) {
|
||||
if (!(other instanceof _SemVer)) {
|
||||
@@ -25121,8 +25142,8 @@ var require_compare = __commonJS({
|
||||
"node_modules/semver/functions/compare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var SemVer = require_semver();
|
||||
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare;
|
||||
var compare2 = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
module2.exports = compare2;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -25130,8 +25151,8 @@ var require_compare = __commonJS({
|
||||
var require_rcompare = __commonJS({
|
||||
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var rcompare = (a, b, loose) => compare(b, a, loose);
|
||||
var compare2 = require_compare();
|
||||
var rcompare = (a, b, loose) => compare2(b, a, loose);
|
||||
module2.exports = rcompare;
|
||||
}
|
||||
});
|
||||
@@ -25140,8 +25161,8 @@ var require_rcompare = __commonJS({
|
||||
var require_compare_loose = __commonJS({
|
||||
"node_modules/semver/functions/compare-loose.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var compareLoose = (a, b) => compare(a, b, true);
|
||||
var compare2 = require_compare();
|
||||
var compareLoose = (a, b) => compare2(a, b, true);
|
||||
module2.exports = compareLoose;
|
||||
}
|
||||
});
|
||||
@@ -25184,8 +25205,8 @@ var require_rsort = __commonJS({
|
||||
var require_gt = __commonJS({
|
||||
"node_modules/semver/functions/gt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gt = (a, b, loose) => compare(a, b, loose) > 0;
|
||||
var compare2 = require_compare();
|
||||
var gt = (a, b, loose) => compare2(a, b, loose) > 0;
|
||||
module2.exports = gt;
|
||||
}
|
||||
});
|
||||
@@ -25194,8 +25215,8 @@ var require_gt = __commonJS({
|
||||
var require_lt = __commonJS({
|
||||
"node_modules/semver/functions/lt.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lt = (a, b, loose) => compare(a, b, loose) < 0;
|
||||
var compare2 = require_compare();
|
||||
var lt = (a, b, loose) => compare2(a, b, loose) < 0;
|
||||
module2.exports = lt;
|
||||
}
|
||||
});
|
||||
@@ -25204,8 +25225,8 @@ var require_lt = __commonJS({
|
||||
var require_eq = __commonJS({
|
||||
"node_modules/semver/functions/eq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var eq = (a, b, loose) => compare(a, b, loose) === 0;
|
||||
var compare2 = require_compare();
|
||||
var eq = (a, b, loose) => compare2(a, b, loose) === 0;
|
||||
module2.exports = eq;
|
||||
}
|
||||
});
|
||||
@@ -25214,8 +25235,8 @@ var require_eq = __commonJS({
|
||||
var require_neq = __commonJS({
|
||||
"node_modules/semver/functions/neq.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
|
||||
var compare2 = require_compare();
|
||||
var neq = (a, b, loose) => compare2(a, b, loose) !== 0;
|
||||
module2.exports = neq;
|
||||
}
|
||||
});
|
||||
@@ -25224,8 +25245,8 @@ var require_neq = __commonJS({
|
||||
var require_gte = __commonJS({
|
||||
"node_modules/semver/functions/gte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var gte5 = (a, b, loose) => compare(a, b, loose) >= 0;
|
||||
var compare2 = require_compare();
|
||||
var gte5 = (a, b, loose) => compare2(a, b, loose) >= 0;
|
||||
module2.exports = gte5;
|
||||
}
|
||||
});
|
||||
@@ -25234,8 +25255,8 @@ var require_gte = __commonJS({
|
||||
var require_lte = __commonJS({
|
||||
"node_modules/semver/functions/lte.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var compare = require_compare();
|
||||
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
|
||||
var compare2 = require_compare();
|
||||
var lte = (a, b, loose) => compare2(a, b, loose) <= 0;
|
||||
module2.exports = lte;
|
||||
}
|
||||
});
|
||||
@@ -25547,6 +25568,7 @@ var require_range = __commonJS({
|
||||
return result;
|
||||
};
|
||||
var parseComparator = (comp, options) => {
|
||||
comp = comp.replace(re[t.BUILD], "");
|
||||
debug2("comp", comp, options);
|
||||
comp = replaceCarets(comp, options);
|
||||
debug2("caret", comp);
|
||||
@@ -26131,12 +26153,12 @@ var require_simplify = __commonJS({
|
||||
"node_modules/semver/ranges/simplify.js"(exports2, module2) {
|
||||
"use strict";
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
module2.exports = (versions, range, options) => {
|
||||
const set2 = [];
|
||||
let first = null;
|
||||
let prev = null;
|
||||
const v = versions.sort((a, b) => compare(a, b, options));
|
||||
const v = versions.sort((a, b) => compare2(a, b, options));
|
||||
for (const version of v) {
|
||||
const included = satisfies2(version, range, options);
|
||||
if (included) {
|
||||
@@ -26184,7 +26206,7 @@ var require_subset = __commonJS({
|
||||
var Comparator = require_comparator();
|
||||
var { ANY } = Comparator;
|
||||
var satisfies2 = require_satisfies();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var subset = (sub, dom, options = {}) => {
|
||||
if (sub === dom) {
|
||||
return true;
|
||||
@@ -26244,7 +26266,7 @@ var require_subset = __commonJS({
|
||||
}
|
||||
let gtltComp;
|
||||
if (gt && lt) {
|
||||
gtltComp = compare(gt.semver, lt.semver, options);
|
||||
gtltComp = compare2(gt.semver, lt.semver, options);
|
||||
if (gtltComp > 0) {
|
||||
return null;
|
||||
} else if (gtltComp === 0 && (gt.operator !== ">=" || lt.operator !== "<=")) {
|
||||
@@ -26324,14 +26346,14 @@ var require_subset = __commonJS({
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp > 0 ? a : comp < 0 ? b : b.operator === ">" && a.operator === ">=" ? b : a;
|
||||
};
|
||||
var lowerLT = (a, b, options) => {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
const comp = compare(a.semver, b.semver, options);
|
||||
const comp = compare2(a.semver, b.semver, options);
|
||||
return comp < 0 ? a : comp > 0 ? b : b.operator === "<" && a.operator === "<=" ? b : a;
|
||||
};
|
||||
module2.exports = subset;
|
||||
@@ -26355,7 +26377,7 @@ var require_semver2 = __commonJS({
|
||||
var minor = require_minor();
|
||||
var patch = require_patch();
|
||||
var prerelease = require_prerelease();
|
||||
var compare = require_compare();
|
||||
var compare2 = require_compare();
|
||||
var rcompare = require_rcompare();
|
||||
var compareLoose = require_compare_loose();
|
||||
var compareBuild = require_compare_build();
|
||||
@@ -26393,7 +26415,7 @@ var require_semver2 = __commonJS({
|
||||
minor,
|
||||
patch,
|
||||
prerelease,
|
||||
compare,
|
||||
compare: compare2,
|
||||
rcompare,
|
||||
compareLoose,
|
||||
compareBuild,
|
||||
@@ -26438,7 +26460,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.6",
|
||||
version: "4.30.9",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -26473,6 +26495,7 @@ var require_package = __commonJS({
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
archiver: "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -26486,14 +26509,14 @@ var require_package = __commonJS({
|
||||
long: "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
octokit: "^5.0.3",
|
||||
semver: "^7.7.2",
|
||||
semver: "^7.7.3",
|
||||
uuid: "^13.0.0"
|
||||
},
|
||||
devDependencies: {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -26504,7 +26527,7 @@ var require_package = __commonJS({
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
ava: "^6.4.1",
|
||||
esbuild: "^0.25.10",
|
||||
@@ -26517,7 +26540,7 @@ var require_package = __commonJS({
|
||||
glob: "^11.0.3",
|
||||
nock: "^14.0.10",
|
||||
sinon: "^21.0.0",
|
||||
typescript: "^5.9.2"
|
||||
typescript: "^5.9.3"
|
||||
},
|
||||
overrides: {
|
||||
"@actions/tool-cache": {
|
||||
@@ -88567,8 +88590,8 @@ var require_commonjs16 = __commonJS({
|
||||
if (rootPath === this.root.name) {
|
||||
return this.root;
|
||||
}
|
||||
for (const [compare, root] of Object.entries(this.roots)) {
|
||||
if (this.sameRoot(rootPath, compare)) {
|
||||
for (const [compare2, root] of Object.entries(this.roots)) {
|
||||
if (this.sameRoot(rootPath, compare2)) {
|
||||
return this.roots[rootPath] = root;
|
||||
}
|
||||
}
|
||||
@@ -88577,9 +88600,9 @@ var require_commonjs16 = __commonJS({
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
sameRoot(rootPath, compare = this.root.name) {
|
||||
sameRoot(rootPath, compare2 = this.root.name) {
|
||||
rootPath = rootPath.toUpperCase().replace(/\//g, "\\").replace(uncDriveRegexp, "$1\\");
|
||||
return rootPath === compare;
|
||||
return rootPath === compare2;
|
||||
}
|
||||
};
|
||||
exports2.PathWin32 = PathWin32;
|
||||
@@ -92598,7 +92621,7 @@ var require_b4a = __commonJS({
|
||||
function byteLength(string, encoding) {
|
||||
return Buffer.byteLength(string, encoding);
|
||||
}
|
||||
function compare(a, b) {
|
||||
function compare2(a, b) {
|
||||
return Buffer.compare(a, b);
|
||||
}
|
||||
function concat(buffers, totalLength) {
|
||||
@@ -92699,7 +92722,7 @@ var require_b4a = __commonJS({
|
||||
allocUnsafe,
|
||||
allocUnsafeSlow,
|
||||
byteLength,
|
||||
compare,
|
||||
compare: compare2,
|
||||
concat,
|
||||
copy,
|
||||
equals,
|
||||
@@ -106366,7 +106389,7 @@ var require_brace_expansion3 = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -108085,13 +108108,13 @@ var require_semver3 = __commonJS({
|
||||
function patch(a, loose) {
|
||||
return new SemVer(a, loose).patch;
|
||||
}
|
||||
exports2.compare = compare;
|
||||
function compare(a, b, loose) {
|
||||
exports2.compare = compare2;
|
||||
function compare2(a, b, loose) {
|
||||
return new SemVer(a, loose).compare(new SemVer(b, loose));
|
||||
}
|
||||
exports2.compareLoose = compareLoose;
|
||||
function compareLoose(a, b) {
|
||||
return compare(a, b, true);
|
||||
return compare2(a, b, true);
|
||||
}
|
||||
exports2.compareBuild = compareBuild;
|
||||
function compareBuild(a, b, loose) {
|
||||
@@ -108101,7 +108124,7 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.rcompare = rcompare;
|
||||
function rcompare(a, b, loose) {
|
||||
return compare(b, a, loose);
|
||||
return compare2(b, a, loose);
|
||||
}
|
||||
exports2.sort = sort;
|
||||
function sort(list, loose) {
|
||||
@@ -108117,27 +108140,27 @@ var require_semver3 = __commonJS({
|
||||
}
|
||||
exports2.gt = gt;
|
||||
function gt(a, b, loose) {
|
||||
return compare(a, b, loose) > 0;
|
||||
return compare2(a, b, loose) > 0;
|
||||
}
|
||||
exports2.lt = lt;
|
||||
function lt(a, b, loose) {
|
||||
return compare(a, b, loose) < 0;
|
||||
return compare2(a, b, loose) < 0;
|
||||
}
|
||||
exports2.eq = eq;
|
||||
function eq(a, b, loose) {
|
||||
return compare(a, b, loose) === 0;
|
||||
return compare2(a, b, loose) === 0;
|
||||
}
|
||||
exports2.neq = neq;
|
||||
function neq(a, b, loose) {
|
||||
return compare(a, b, loose) !== 0;
|
||||
return compare2(a, b, loose) !== 0;
|
||||
}
|
||||
exports2.gte = gte5;
|
||||
function gte5(a, b, loose) {
|
||||
return compare(a, b, loose) >= 0;
|
||||
return compare2(a, b, loose) >= 0;
|
||||
}
|
||||
exports2.lte = lte;
|
||||
function lte(a, b, loose) {
|
||||
return compare(a, b, loose) <= 0;
|
||||
return compare2(a, b, loose) <= 0;
|
||||
}
|
||||
exports2.cmp = cmp;
|
||||
function cmp(a, op, b, loose) {
|
||||
@@ -117377,6 +117400,11 @@ function isSafeArtifactUpload(codeQlVersion) {
|
||||
|
||||
// src/feature-flags.ts
|
||||
var featureConfig = {
|
||||
["allow_toolcache_input" /* AllowToolcacheInput */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: void 0
|
||||
},
|
||||
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -117619,7 +117647,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
|
||||
if (fs.existsSync(baseTempDir)) {
|
||||
const outputDirs = fs.readdirSync(baseTempDir);
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
|
||||
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => path.extname(f) === ".sarif");
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
|
||||
}
|
||||
|
||||
603 lib/upload-sarif-action.js (generated)
File diff suppressed because it is too large
264 package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "3.30.6",
|
||||
"version": "4.30.9",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "codeql",
|
||||
"version": "3.30.6",
|
||||
"version": "4.30.9",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "^2.3.1",
|
||||
@@ -20,6 +20,7 @@
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
"archiver": "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -33,14 +34,14 @@
|
||||
"long": "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
"octokit": "^5.0.3",
|
||||
"semver": "^7.7.2",
|
||||
"semver": "^7.7.3",
|
||||
"uuid": "^13.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -51,7 +52,7 @@
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
"ava": "^6.4.1",
|
||||
"esbuild": "^0.25.10",
|
||||
@@ -64,7 +65,7 @@
|
||||
"glob": "^11.0.3",
|
||||
"nock": "^14.0.10",
|
||||
"sinon": "^21.0.0",
|
||||
"typescript": "^5.9.2"
|
||||
"typescript": "^5.9.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@aashutoshrathi/word-wrap": {
|
||||
@@ -1346,9 +1347,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@eslint/js": {
|
||||
"version": "9.36.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.36.0.tgz",
|
||||
"integrity": "sha512-uhCbYtYynH30iZErszX78U+nR3pJU3RHGQ57NXy5QupD4SBVwDeU8TNBy+MjMngc1UyIW9noKqsRqfjQTBU2dw==",
|
||||
"version": "9.37.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.37.0.tgz",
|
||||
"integrity": "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -2175,7 +2176,6 @@
|
||||
"version": "26.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-26.0.0.tgz",
|
||||
"integrity": "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@octokit/openapi-webhooks-types": {
|
||||
@@ -2299,31 +2299,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/request-error": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.0.tgz",
|
||||
"integrity": "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg==",
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.1.tgz",
|
||||
"integrity": "sha512-CZpFwV4+1uBrxu7Cw8E5NCXDWFNf18MSY23TdxCBgjw1tXXHvTrZVsXlW8hgFTOLw8RQR1BBrMvYRtuyaijHMA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@octokit/types": "^14.0.0"
|
||||
"@octokit/types": "^15.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 20"
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
|
||||
"integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@octokit/request-error/node_modules/@octokit/types": {
|
||||
"version": "14.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
|
||||
"integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@octokit/openapi-types": "^25.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
|
||||
"version": "25.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
|
||||
@@ -2348,7 +2334,6 @@
|
||||
"version": "15.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-15.0.0.tgz",
|
||||
"integrity": "sha512-8o6yDfmoGJUIeR9OfYU0/TUJTnMPG2r68+1yEdUeG2Fdqpj8Qetg0ziKIgcBm0RW/j29H41WP37CYCEhp6GoHQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@octokit/openapi-types": "^26.0.0"
|
||||
@@ -2712,17 +2697,17 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.44.1.tgz",
|
||||
"integrity": "sha512-molgphGqOBT7t4YKCSkbasmu1tb1MgrZ2szGzHbclF7PNmOkSTQVHy+2jXOSnxvR3+Xe1yySHFZoqMpz3TfQsw==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.0.tgz",
|
||||
"integrity": "sha512-hA8gxBq4ukonVXPy0OKhiaUh/68D0E88GSmtC1iAEnGaieuDi38LhS7jdCHRLi6ErJBNDGCzvh5EnzdPwUc0DA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.10.0",
|
||||
"@typescript-eslint/scope-manager": "8.44.1",
|
||||
"@typescript-eslint/type-utils": "8.44.1",
|
||||
"@typescript-eslint/utils": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1",
|
||||
"@typescript-eslint/scope-manager": "8.46.0",
|
||||
"@typescript-eslint/type-utils": "8.46.0",
|
||||
"@typescript-eslint/utils": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^7.0.0",
|
||||
"natural-compare": "^1.4.0",
|
||||
@@ -2736,20 +2721,20 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@typescript-eslint/parser": "^8.44.1",
|
||||
"@typescript-eslint/parser": "^8.46.0",
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.1.tgz",
|
||||
"integrity": "sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.0.tgz",
|
||||
"integrity": "sha512-lWETPa9XGcBes4jqAMYD9fW0j4n6hrPtTJwWDmtqgFO/4HF4jmdH/Q6wggTw5qIT5TXjKzbt7GsZUBnWoO3dqw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1"
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -2760,9 +2745,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz",
|
||||
"integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.0.tgz",
|
||||
"integrity": "sha512-bHGGJyVjSE4dJJIO5yyEWt/cHyNwga/zXGJbJJ8TiO01aVREK6gCTu3L+5wrkb1FbDkQ+TKjMNe9R/QQQP9+rA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -2774,16 +2759,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.1.tgz",
|
||||
"integrity": "sha512-qnQJ+mVa7szevdEyvfItbO5Vo+GfZ4/GZWWDRRLjrxYPkhM+6zYB2vRYwCsoJLzqFCdZT4mEqyJoyzkunsZ96A==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.0.tgz",
|
||||
"integrity": "sha512-ekDCUfVpAKWJbRfm8T1YRrCot1KFxZn21oV76v5Fj4tr7ELyk84OS+ouvYdcDAwZL89WpEkEj2DKQ+qg//+ucg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.44.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1",
|
||||
"@typescript-eslint/project-service": "8.46.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
@@ -2803,16 +2788,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.44.1.tgz",
|
||||
"integrity": "sha512-DpX5Fp6edTlocMCwA+mHY8Mra+pPjRZ0TfHkXI8QFelIKcbADQz1LUPNtzOFUriBB2UYqw4Pi9+xV4w9ZczHFg==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.0.tgz",
|
||||
"integrity": "sha512-nD6yGWPj1xiOm4Gk0k6hLSZz2XkNXhuYmyIrOWcHoPuAhjT9i5bAG+xbWPgFeNR8HPHHtpNKdYUXJl/D3x7f5g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/scope-manager": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/typescript-estree": "8.44.1"
|
||||
"@typescript-eslint/scope-manager": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/typescript-estree": "8.46.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -2827,13 +2812,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.1.tgz",
|
||||
"integrity": "sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.0.tgz",
|
||||
"integrity": "sha512-FrvMpAK+hTbFy7vH5j1+tMYHMSKLE6RzluFJlkFNKD0p9YsUT75JlBSmr5so3QRzvMwU5/bIEdeNrxm8du8l3Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -2906,16 +2891,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.44.1.tgz",
|
||||
"integrity": "sha512-EHrrEsyhOhxYt8MTg4zTF+DJMuNBzWwgvvOYNj/zm1vnaD/IC5zCXFehZv94Piqa2cRFfXrTFxIvO95L7Qc/cw==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.0.tgz",
|
||||
"integrity": "sha512-n1H6IcDhmmUEG7TNVSspGmiHHutt7iVKtZwRppD7e04wha5MrkV1h3pti9xQLcCMt6YWsncpoT0HMjkH1FNwWQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/typescript-estree": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1",
|
||||
"@typescript-eslint/scope-manager": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/typescript-estree": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -2931,14 +2916,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.1.tgz",
|
||||
"integrity": "sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.0.tgz",
|
||||
"integrity": "sha512-lWETPa9XGcBes4jqAMYD9fW0j4n6hrPtTJwWDmtqgFO/4HF4jmdH/Q6wggTw5qIT5TXjKzbt7GsZUBnWoO3dqw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1"
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -2949,9 +2934,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz",
|
||||
"integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.0.tgz",
|
||||
"integrity": "sha512-bHGGJyVjSE4dJJIO5yyEWt/cHyNwga/zXGJbJJ8TiO01aVREK6gCTu3L+5wrkb1FbDkQ+TKjMNe9R/QQQP9+rA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -2963,16 +2948,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.1.tgz",
|
||||
"integrity": "sha512-qnQJ+mVa7szevdEyvfItbO5Vo+GfZ4/GZWWDRRLjrxYPkhM+6zYB2vRYwCsoJLzqFCdZT4mEqyJoyzkunsZ96A==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.0.tgz",
|
||||
"integrity": "sha512-ekDCUfVpAKWJbRfm8T1YRrCot1KFxZn21oV76v5Fj4tr7ELyk84OS+ouvYdcDAwZL89WpEkEj2DKQ+qg//+ucg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.44.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1",
|
||||
"@typescript-eslint/project-service": "8.46.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
@@ -2992,13 +2977,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.1.tgz",
|
||||
"integrity": "sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.0.tgz",
|
||||
"integrity": "sha512-FrvMpAK+hTbFy7vH5j1+tMYHMSKLE6RzluFJlkFNKD0p9YsUT75JlBSmr5so3QRzvMwU5/bIEdeNrxm8du8l3Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -3062,14 +3047,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/project-service": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.44.1.tgz",
|
||||
"integrity": "sha512-ycSa60eGg8GWAkVsKV4E6Nz33h+HjTXbsDT4FILyL8Obk5/mx4tbvCNsLf9zret3ipSumAOG89UcCs/KRaKYrA==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.0.tgz",
|
||||
"integrity": "sha512-OEhec0mH+U5Je2NZOeK1AbVCdm0ChyapAyTeXVIYTPXDJ3F07+cu87PPXcGoYqZ7M9YJVvFnfpGg1UmCIqM+QQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.44.1",
|
||||
"@typescript-eslint/types": "^8.44.1",
|
||||
"@typescript-eslint/tsconfig-utils": "^8.46.0",
|
||||
"@typescript-eslint/types": "^8.46.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -3084,9 +3069,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/project-service/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz",
|
||||
"integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.0.tgz",
|
||||
"integrity": "sha512-bHGGJyVjSE4dJJIO5yyEWt/cHyNwga/zXGJbJJ8TiO01aVREK6gCTu3L+5wrkb1FbDkQ+TKjMNe9R/QQQP9+rA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -3116,9 +3101,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.44.1.tgz",
|
||||
"integrity": "sha512-B5OyACouEjuIvof3o86lRMvyDsFwZm+4fBOqFHccIctYgBjqR3qT39FBYGN87khcgf0ExpdCBeGKpKRhSFTjKQ==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.0.tgz",
|
||||
"integrity": "sha512-WrYXKGAHY836/N7zoK/kzi6p8tXFhasHh8ocFL9VZSAkvH956gfeRfcnhs3xzRy8qQ/dq3q44v1jvQieMFg2cw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -3133,15 +3118,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.44.1.tgz",
|
||||
"integrity": "sha512-KdEerZqHWXsRNKjF9NYswNISnFzXfXNDfPxoTh7tqohU/PRIbwTmsjGK6V9/RTYWau7NZvfo52lgVk+sJh0K3g==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.0.tgz",
|
||||
"integrity": "sha512-hy+lvYV1lZpVs2jRaEYvgCblZxUoJiPyCemwbQZ+NGulWkQRy0HRPYAoef/CNSzaLt+MLvMptZsHXHlkEilaeg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/typescript-estree": "8.44.1",
|
||||
"@typescript-eslint/utils": "8.44.1",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/typescript-estree": "8.46.0",
|
||||
"@typescript-eslint/utils": "8.46.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
},
|
||||
@@ -3158,14 +3143,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.44.1.tgz",
|
||||
"integrity": "sha512-NdhWHgmynpSvyhchGLXh+w12OMT308Gm25JoRIyTZqEbApiBiQHD/8xgb6LqCWCFcxFtWwaVdFsLPQI3jvhywg==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.0.tgz",
|
||||
"integrity": "sha512-lWETPa9XGcBes4jqAMYD9fW0j4n6hrPtTJwWDmtqgFO/4HF4jmdH/Q6wggTw5qIT5TXjKzbt7GsZUBnWoO3dqw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1"
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -3176,9 +3161,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.44.1.tgz",
|
||||
"integrity": "sha512-Lk7uj7y9uQUOEguiDIDLYLJOrYHQa7oBiURYVFqIpGxclAFQ78f6VUOM8lI2XEuNOKNB7XuvM2+2cMXAoq4ALQ==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.0.tgz",
|
||||
"integrity": "sha512-bHGGJyVjSE4dJJIO5yyEWt/cHyNwga/zXGJbJJ8TiO01aVREK6gCTu3L+5wrkb1FbDkQ+TKjMNe9R/QQQP9+rA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
@@ -3190,16 +3175,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.44.1.tgz",
|
||||
"integrity": "sha512-qnQJ+mVa7szevdEyvfItbO5Vo+GfZ4/GZWWDRRLjrxYPkhM+6zYB2vRYwCsoJLzqFCdZT4mEqyJoyzkunsZ96A==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.0.tgz",
|
||||
"integrity": "sha512-ekDCUfVpAKWJbRfm8T1YRrCot1KFxZn21oV76v5Fj4tr7ELyk84OS+ouvYdcDAwZL89WpEkEj2DKQ+qg//+ucg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.44.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/visitor-keys": "8.44.1",
|
||||
"@typescript-eslint/project-service": "8.46.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/visitor-keys": "8.46.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
@@ -3219,16 +3204,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.44.1.tgz",
|
||||
"integrity": "sha512-DpX5Fp6edTlocMCwA+mHY8Mra+pPjRZ0TfHkXI8QFelIKcbADQz1LUPNtzOFUriBB2UYqw4Pi9+xV4w9ZczHFg==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.0.tgz",
|
||||
"integrity": "sha512-nD6yGWPj1xiOm4Gk0k6hLSZz2XkNXhuYmyIrOWcHoPuAhjT9i5bAG+xbWPgFeNR8HPHHtpNKdYUXJl/D3x7f5g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/scope-manager": "8.44.1",
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/typescript-estree": "8.44.1"
|
||||
"@typescript-eslint/scope-manager": "8.46.0",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"@typescript-eslint/typescript-estree": "8.46.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -3243,13 +3228,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.44.1.tgz",
|
||||
"integrity": "sha512-576+u0QD+Jp3tZzvfRfxon0EA2lzcDt3lhUbsC6Lgzy9x2VR4E+JUiNyGHi5T8vk0TV+fpJ5GLG1JsJuWCaKhw==",
|
||||
"version": "8.46.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.0.tgz",
|
||||
"integrity": "sha512-FrvMpAK+hTbFy7vH5j1+tMYHMSKLE6RzluFJlkFNKD0p9YsUT75JlBSmr5so3QRzvMwU5/bIEdeNrxm8du8l3Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.44.1",
|
||||
"@typescript-eslint/types": "8.46.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
@@ -4200,7 +4185,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
@@ -8223,9 +8210,10 @@
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.7.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
|
||||
"integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
|
||||
"version": "7.7.3",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz",
|
||||
"integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==",
|
||||
"license": "ISC",
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
},
|
||||
@@ -9041,9 +9029,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.9.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz",
|
||||
"integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==",
|
||||
"version": "5.9.3",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
|
||||
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
|
||||
11 package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "3.30.6",
|
||||
"version": "4.30.9",
|
||||
"private": true,
|
||||
"description": "CodeQL action",
|
||||
"scripts": {
|
||||
@@ -35,6 +35,7 @@
|
||||
"@actions/io": "^1.1.3",
|
||||
"@actions/tool-cache": "^2.0.2",
|
||||
"@octokit/plugin-retry": "^6.0.0",
|
||||
"@octokit/request-error": "^7.0.1",
|
||||
"@schemastore/package": "0.0.10",
|
||||
"archiver": "^7.0.1",
|
||||
"check-disk-space": "^3.4.0",
|
||||
@@ -48,14 +49,14 @@
|
||||
"long": "^5.3.2",
|
||||
"node-forge": "^1.3.1",
|
||||
"octokit": "^5.0.3",
|
||||
"semver": "^7.7.2",
|
||||
"semver": "^7.7.3",
|
||||
"uuid": "^13.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ava/typescript": "6.0.0",
|
||||
"@eslint/compat": "^1.4.0",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.36.0",
|
||||
"@eslint/js": "^9.37.0",
|
||||
"@microsoft/eslint-formatter-sarif": "^3.1.0",
|
||||
"@octokit/types": "^15.0.0",
|
||||
"@types/archiver": "^6.0.3",
|
||||
@@ -66,7 +67,7 @@
|
||||
"@types/node-forge": "^1.3.14",
|
||||
"@types/semver": "^7.7.1",
|
||||
"@types/sinon": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^8.44.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.41.0",
|
||||
"ava": "^6.4.1",
|
||||
"esbuild": "^0.25.10",
|
||||
@@ -79,7 +80,7 @@
|
||||
"glob": "^11.0.3",
|
||||
"nock": "^14.0.10",
|
||||
"sinon": "^21.0.0",
|
||||
"typescript": "^5.9.2"
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
"overrides": {
|
||||
"@actions/tool-cache": {
|
||||
|
||||
32 pr-checks/checks/bundle-from-toolcache.yml (new file)
@@ -0,0 +1,32 @@
|
||||
name: "Bundle: From toolcache"
|
||||
description: "The CodeQL bundle should be cached within the toolcache"
|
||||
versions:
|
||||
- toolcache
|
||||
steps:
|
||||
- name: Install @actions/tool-cache
|
||||
run: npm install @actions/tool-cache
|
||||
- name: Check toolcache contains CodeQL
|
||||
continue-on-error: true
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const toolcache = require('@actions/tool-cache');
|
||||
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
|
||||
if (allCodeqlVersions.length === 0) {
|
||||
throw new Error(`CodeQL could not be found in the toolcache`);
|
||||
}
|
||||
- id: init
|
||||
uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
- name: Check CodeQL is installed within the toolcache
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
script: |
|
||||
const toolcache = require('@actions/tool-cache');
|
||||
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
|
||||
console.log(`Found CodeQL versions: ${allCodeqlVersions}`);
|
||||
if (allCodeqlVersions.length === 0) {
|
||||
throw new Error('CodeQL not found in toolcache');
|
||||
}
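Both github-script steps in this check reduce to the same lookup: ask @actions/tool-cache for every cached CodeQL version and fail if the list is empty. A standalone TypeScript sketch of that check follows; the wrapper function name is an illustrative assumption, not part of the check itself.

import * as toolcache from "@actions/tool-cache";

// findAllVersions lists the versions of a tool present in the runner toolcache.
function assertCodeQLInToolcache(): void {
  const versions = toolcache.findAllVersions("CodeQL");
  console.log(`Found CodeQL versions: ${versions.join(", ")}`);
  if (versions.length === 0) {
    throw new Error("CodeQL could not be found in the toolcache");
  }
}

assertCodeQLInToolcache();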
|
||||
@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
|
||||
versions: ["default"]
|
||||
steps:
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@0481980f17b760ef6bca5e8c55809102a0af1e5a # v1.263.0
|
||||
uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
name: "Upload-sarif: code quality endpoint"
|
||||
description: "Checks that uploading SARIFs to the code quality endpoint works"
|
||||
versions: ["default"]
|
||||
installGo: true
|
||||
steps:
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: csharp,java,javascript,python
|
||||
analysis-kinds: code-quality
|
||||
- name: Build code
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
upload: never
|
||||
- uses: ./../action/upload-sarif
|
||||
id: upload-sarif
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
- name: "Check output from `upload-sarif` step"
|
||||
if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
|
||||
run: exit 1
|
||||
81 pr-checks/checks/upload-sarif.yml (new file)
@@ -0,0 +1,81 @@
|
||||
name: "Test different uses of `upload-sarif`"
|
||||
description: "Checks that uploading SARIFs to the code quality endpoint works"
|
||||
versions: ["default"]
|
||||
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
|
||||
installGo: true
|
||||
steps:
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: csharp,java,javascript,python
|
||||
analysis-kinds: ${{ matrix.analysis-kinds }}
|
||||
- name: Build code
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
upload: never
|
||||
output: ${{ runner.temp }}/results
|
||||
|
||||
- name: |
|
||||
Upload all SARIF files for `analysis-kinds: ${{ matrix.analysis-kinds }}`
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-sarif
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
|
||||
- name: "Fail for missing output from `upload-sarif` step for `code-scanning`"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
|
||||
- name: "Fail for missing output from `upload-sarif` step for `code-quality`"
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)"
|
||||
run: exit 1
|
||||
|
||||
- name: Upload single SARIF file for Code Scanning
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-scanning
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
|
||||
- name: "Fail for missing output from `upload-single-sarif-code-scanning` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
|
||||
- name: Upload single SARIF file for Code Quality
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-quality
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
|
||||
- name: "Fail for missing output from `upload-single-sarif-code-quality` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)"
|
||||
run: exit 1
|
||||
|
||||
- name: Change SARIF file extension
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
|
||||
- name: Upload single non-`.sarif` file
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-non-sarif
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
|
||||
- name: "Fail for missing output from `upload-single-non-sarif` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
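The `fromJSON(steps.<id>.outputs.sarif-ids)` expressions above suggest that the `sarif-ids` output is a JSON object keyed by analysis kind. Under that assumption, here is a small TypeScript sketch of the same check the workflow steps perform; the interface shape and helper name are illustrative, not taken from the action's documented output contract.

// Assumed output shape, inferred from the workflow expressions above.
interface SarifIds {
  "code-scanning"?: string;
  "code-quality"?: string;
}

// Fail if any expected analysis kind is missing a SARIF ID.
function checkSarifIds(
  rawOutput: string,
  expectedKinds: Array<keyof SarifIds>,
): void {
  const ids = JSON.parse(rawOutput) as SarifIds;
  for (const kind of expectedKinds) {
    if (!ids[kind]) {
      throw new Error(`Missing SARIF ID for analysis kind: ${kind}`);
    }
  }
}

// Example: a run with both analysis kinds enabled.
checkSarifIds(
  '{"code-scanning":"abc-123","code-quality":"def-456"}',
  ["code-scanning", "code-quality"],
);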
|
||||
@@ -37,28 +37,29 @@ steps:
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
run: |
|
||||
PAYLOAD_FILE="$RUNNER_TEMP/payload-code-scanning.json"
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
ACTUAL_COMMIT_OID="$(cat "$PAYLOAD_FILE" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$PAYLOAD_FILE" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$PAYLOAD_FILE" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -21,5 +21,5 @@ outputs:
|
||||
environment:
|
||||
description: The inferred build environment configuration.
|
||||
runs:
|
||||
using: node20
|
||||
using: node24
|
||||
main: '../lib/resolve-environment-action.js'
|
||||
|
||||
@@ -247,9 +247,14 @@ export function isSelfHostedRunner() {
|
||||
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
|
||||
}
|
||||
|
||||
/** Determines whether the workflow trigger is `dynamic`. */
|
||||
export function isDynamicWorkflow(): boolean {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
|
||||
/** Determines whether we are running in default setup. */
|
||||
export function isDefaultSetup(): boolean {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
return isDynamicWorkflow();
|
||||
}
|
||||
|
||||
export function prettyPrintInvocation(cmd: string, args: string[]): string {
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { fixCodeQualityCategory } from "./actions-util";
|
||||
import { Logger } from "./logging";
|
||||
import { ConfigurationError } from "./util";
|
||||
|
||||
export enum AnalysisKind {
|
||||
@@ -61,6 +63,8 @@ export interface AnalysisConfig {
|
||||
/** A predicate on filenames to decide whether a SARIF file
|
||||
* belongs to this kind of analysis. */
|
||||
sarifPredicate: (name: string) => boolean;
|
||||
/** Analysis-specific adjustment of the category. */
|
||||
fixCategory: (logger: Logger, category?: string) => string | undefined;
|
||||
/** A prefix for environment variables used to track the uniqueness of SARIF uploads. */
|
||||
sentinelPrefix: string;
|
||||
}
|
||||
@@ -74,6 +78,7 @@ export const CodeScanning: AnalysisConfig = {
|
||||
sarifPredicate: (name) =>
|
||||
name.endsWith(CodeScanning.sarifExtension) &&
|
||||
!CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_",
|
||||
};
|
||||
|
||||
@@ -84,5 +89,29 @@ export const CodeQuality: AnalysisConfig = {
|
||||
target: SARIF_UPLOAD_ENDPOINT.CODE_QUALITY,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_",
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the `AnalysisConfig` corresponding to `kind`.
|
||||
* @param kind The analysis kind to get the `AnalysisConfig` for.
|
||||
* @returns The `AnalysisConfig` corresponding to `kind`.
|
||||
*/
|
||||
export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
|
||||
// Using a switch statement here accomplishes two things:
|
||||
// 1. The type checker believes us that we have a case for every `AnalysisKind`.
|
||||
// 2. If we ever add another member to `AnalysisKind`, the type checker will alert us that we have to add a case.
|
||||
switch (kind) {
|
||||
case AnalysisKind.CodeScanning:
|
||||
return CodeScanning;
|
||||
case AnalysisKind.CodeQuality:
|
||||
return CodeQuality;
|
||||
}
|
||||
}
|
||||
|
||||
// Since we have overlapping extensions (i.e. ".sarif" includes ".quality.sarif"),
// we want to scan a folder containing SARIF files in an order that finds the more
// specific extensions first. This constant defines an array in the order of analysis
// configurations with more specific extensions to less specific extensions.
export const SarifScanOrder = [CodeQuality, CodeScanning];
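To make the ordering concrete, here is a hedged TypeScript sketch of how a scan loop could classify SARIF files using predicates like the ones above; the `classifySarif` helper and the sample file names are illustrative assumptions, not part of this change.

import * as path from "path";

// Minimal stand-ins for the two AnalysisConfig predicates shown above.
const qualityPredicate = (name: string) => name.endsWith(".quality.sarif");
const scanningPredicate = (name: string) =>
  name.endsWith(".sarif") && !qualityPredicate(name);

// More specific extension first, mirroring SarifScanOrder.
const scanOrder: Array<[string, (name: string) => boolean]> = [
  ["code-quality", qualityPredicate],
  ["code-scanning", scanningPredicate],
];

// Hypothetical helper: returns the analysis kind of the first matching predicate.
function classifySarif(fileName: string): string | undefined {
  const base = path.basename(fileName);
  const match = scanOrder.find(([, predicate]) => predicate(base));
  return match?.[0];
}

console.log(classifySarif("javascript.quality.sarif")); // "code-quality"
console.log(classifySarif("javascript.sarif")); // "code-scanning"
console.log(classifySarif("notes.txt")); // undefined

Checking the more specific ".quality.sarif" predicate first is what keeps quality results from being swept up by the generic ".sarif" match.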
|
||||
|
||||
@@ -26,7 +26,10 @@ import {
|
||||
isCodeScanningEnabled,
|
||||
} from "./config-utils";
|
||||
import { uploadDatabases } from "./database-upload";
|
||||
import { uploadDependencyCaches } from "./dependency-caching";
|
||||
import {
|
||||
DependencyCacheUploadStatusReport,
|
||||
uploadDependencyCaches,
|
||||
} from "./dependency-caching";
|
||||
import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Feature, Features } from "./feature-flags";
|
||||
@@ -55,10 +58,15 @@ interface AnalysisStatusReport
|
||||
extends uploadLib.UploadStatusReport,
|
||||
QueriesStatusReport {}
|
||||
|
||||
interface DependencyCachingUploadStatusReport {
|
||||
dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
|
||||
}
|
||||
|
||||
interface FinishStatusReport
|
||||
extends StatusReportBase,
|
||||
DatabaseCreationTimings,
|
||||
AnalysisStatusReport {}
|
||||
AnalysisStatusReport,
|
||||
DependencyCachingUploadStatusReport {}
|
||||
|
||||
interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
|
||||
/** Size of TRAP caches that we uploaded, in bytes. */
|
||||
@@ -76,6 +84,7 @@ async function sendStatusReport(
|
||||
dbCreationTimings: DatabaseCreationTimings | undefined,
|
||||
didUploadTrapCaches: boolean,
|
||||
trapCacheCleanup: TrapCacheCleanupStatusReport | undefined,
|
||||
dependencyCacheResults: DependencyCacheUploadStatusReport | undefined,
|
||||
logger: Logger,
|
||||
) {
|
||||
const status = getActionsStatus(error, stats?.analyze_failure_language);
|
||||
@@ -95,6 +104,7 @@ async function sendStatusReport(
|
||||
...(stats || {}),
|
||||
...(dbCreationTimings || {}),
|
||||
...(trapCacheCleanup || {}),
|
||||
dependency_caching_upload_results: dependencyCacheResults,
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
|
||||
@@ -209,6 +219,7 @@ async function run() {
|
||||
let trapCacheUploadTime: number | undefined = undefined;
|
||||
let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
|
||||
let didUploadTrapCaches = false;
|
||||
let dependencyCacheResults: DependencyCacheUploadStatusReport | undefined;
|
||||
util.initializeEnvironment(actionsUtil.getActionVersion());
|
||||
|
||||
// Make inputs accessible in the `post` step, details at
|
||||
@@ -345,16 +356,14 @@ async function run() {
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = analyses.CodeQuality;
|
||||
const qualityUploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.fixCodeQualityCategory(
|
||||
logger,
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeQuality,
|
||||
analysis,
|
||||
);
|
||||
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
@@ -388,7 +397,11 @@ async function run() {
|
||||
Feature.JavaMinimizeDependencyJars,
|
||||
codeql,
|
||||
);
|
||||
await uploadDependencyCaches(config, logger, minimizeJavaJars);
|
||||
dependencyCacheResults = await uploadDependencyCaches(
|
||||
config,
|
||||
logger,
|
||||
minimizeJavaJars,
|
||||
);
|
||||
}
|
||||
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
@@ -431,6 +444,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
return;
|
||||
@@ -449,6 +463,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else if (runStats) {
|
||||
@@ -461,6 +476,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else {
|
||||
@@ -473,6 +489,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -334,7 +334,7 @@ test("resolveQuerySuiteAlias", (t) => {
|
||||
for (const suite of defaultSuites) {
|
||||
const resolved = resolveQuerySuiteAlias(KnownLanguage.go, suite);
|
||||
t.assert(
|
||||
resolved.endsWith(".qls"),
|
||||
path.extname(resolved) === ".qls",
|
||||
"Resolved default suite doesn't end in .qls",
|
||||
);
|
||||
t.assert(
|
||||
|
||||
@@ -7,7 +7,6 @@ import * as del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import {
|
||||
fixCodeQualityCategory,
|
||||
getRequiredInput,
|
||||
getTemporaryDirectory,
|
||||
PullRequestBranches,
|
||||
@@ -781,7 +780,7 @@ export async function runQueries(
|
||||
// accepted by the Code Quality backend.
|
||||
let category = automationDetailsId;
|
||||
if (analysis.kind === analyses.AnalysisKind.CodeQuality) {
|
||||
category = fixCodeQualityCategory(logger, automationDetailsId);
|
||||
category = analysis.fixCategory(logger, automationDetailsId);
|
||||
}
|
||||
|
||||
const sarifFile = path.join(
|
||||
|
||||
@@ -245,7 +245,7 @@ export interface ActionsCacheItem {
|
||||
/** List all Actions cache entries matching the provided key and ref. */
|
||||
export async function listActionsCaches(
|
||||
key: string,
|
||||
ref: string,
|
||||
ref?: string,
|
||||
): Promise<ActionsCacheItem[]> {
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
|
||||
|
||||
@@ -52,11 +52,11 @@ export async function determineAutobuildLanguages(
|
||||
* For example, consider a user with the following workflow file:
|
||||
*
|
||||
* ```yml
|
||||
* - uses: github/codeql-action/init@v3
|
||||
* - uses: github/codeql-action/init@v4
|
||||
* with:
|
||||
* languages: go, java
|
||||
* - uses: github/codeql-action/autobuild@v3
|
||||
* - uses: github/codeql-action/analyze@v3
|
||||
* - uses: github/codeql-action/autobuild@v4
|
||||
* - uses: github/codeql-action/analyze@v4
|
||||
* ```
|
||||
*
|
||||
* - With Go extraction disabled, we will run the Java autobuilder in the
|
||||
|
||||
@@ -74,6 +74,7 @@ async function installIntoToolcache({
|
||||
cliVersion !== undefined
|
||||
? { cliVersion, tagName }
|
||||
: SAMPLE_DEFAULT_CLI_VERSION,
|
||||
createFeatures([]),
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -122,6 +123,8 @@ async function stubCodeql(): Promise<codeql.CodeQL> {
|
||||
}
|
||||
|
||||
test("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -140,6 +143,7 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -154,6 +158,8 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
|
||||
});
|
||||
|
||||
test("caches semantically versioned bundles using their semantic version number", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const url = mockBundleDownloadApi({
|
||||
@@ -166,6 +172,7 @@ test("caches semantically versioned bundles using their semantic version number"
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -181,6 +188,8 @@ test("caches semantically versioned bundles using their semantic version number"
|
||||
});
|
||||
|
||||
test("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -199,6 +208,7 @@ test("downloads an explicitly requested bundle even if a different version is ca
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -227,6 +237,8 @@ for (const {
|
||||
expectedToolcacheVersion,
|
||||
} of EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES) {
|
||||
test(`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -243,6 +255,7 @@ for (const {
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -266,6 +279,8 @@ for (const toolcacheVersion of [
|
||||
`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
|
||||
`${toolcacheVersion} is installed`,
|
||||
async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -281,6 +296,7 @@ for (const toolcacheVersion of [
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -295,6 +311,8 @@ for (const toolcacheVersion of [
|
||||
}
|
||||
|
||||
test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -313,6 +331,7 @@ test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
},
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -328,6 +347,8 @@ test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
});
|
||||
|
||||
test(`downloads bundle if only an unpinned version is cached on GHES`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -349,6 +370,7 @@ test(`downloads bundle if only an unpinned version is cached on GHES`, async (t)
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
},
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -364,6 +386,8 @@ test(`downloads bundle if only an unpinned version is cached on GHES`, async (t)
|
||||
});
|
||||
|
||||
test('downloads bundle if "latest" tools specified but not cached', async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -382,6 +406,7 @@ test('downloads bundle if "latest" tools specified but not cached', async (t) =>
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -397,6 +422,8 @@ test('downloads bundle if "latest" tools specified but not cached', async (t) =>
|
||||
});
|
||||
|
||||
test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -417,6 +444,7 @@ test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t)
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
|
||||
@@ -3,6 +3,7 @@ import * as path from "path";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||
import { RequestError } from "@octokit/request-error";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import {
|
||||
@@ -308,6 +309,7 @@ const CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
|
||||
* @param tempDir
|
||||
* @param variant
|
||||
* @param defaultCliVersion
|
||||
* @param features Information about the features that are enabled.
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
@@ -319,6 +321,7 @@ export async function setupCodeQL(
|
||||
tempDir: string,
|
||||
variant: util.GitHubVariant,
|
||||
defaultCliVersion: CodeQLDefaultVersionInfo,
|
||||
features: FeatureEnablement,
|
||||
logger: Logger,
|
||||
checkVersion: boolean,
|
||||
): Promise<{
|
||||
@@ -341,6 +344,7 @@ export async function setupCodeQL(
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -370,7 +374,8 @@ export async function setupCodeQL(
|
||||
} catch (e) {
|
||||
const ErrorClass =
|
||||
e instanceof util.ConfigurationError ||
|
||||
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
|
||||
(e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
|
||||
(e instanceof RequestError && e.status === 429) // rate limited
|
||||
? util.ConfigurationError
|
||||
: Error;
|
||||
|
||||
|
||||
@@ -723,7 +723,14 @@ export async function getOverlayDatabaseMode(
|
||||
buildMode !== BuildMode.None &&
|
||||
(
|
||||
await Promise.all(
|
||||
languages.map(async (l) => await codeql.isTracedLanguage(l)),
|
||||
languages.map(
|
||||
async (l) =>
|
||||
l !== KnownLanguage.go && // Workaround to allow overlay analysis for Go with any build
|
||||
// mode, since it does not yet support BMN. The Go autobuilder and/or extractor will
|
||||
// ensure that overlay-base databases are only created for supported Go build setups,
|
||||
// and that we'll fall back to full databases in other cases.
|
||||
(await codeql.isTracedLanguage(l)),
|
||||
),
|
||||
)
|
||||
).some(Boolean)
|
||||
) {
|
||||
|
||||
@@ -153,7 +153,6 @@ const packSpecPrettyPrintingMacro = test.macro({
|
||||
title: (
|
||||
_providedTitle: string | undefined,
|
||||
packStr: string,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_packObj: dbConfig.Pack,
|
||||
) => `Prettyprint pack spec: '${packStr}'`,
|
||||
});
|
||||
|
||||
@@ -59,7 +59,7 @@ export async function uploadCombinedSarifArtifacts(
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs
|
||||
.readdirSync(path.resolve(baseTempDir, outputDir))
|
||||
.filter((f) => f.endsWith(".sarif"));
|
||||
.filter((f) => path.extname(f) === ".sarif");
|
||||
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.23.1",
"cliVersion": "2.23.1",
"priorBundleVersion": "codeql-bundle-v2.23.0",
"priorCliVersion": "2.23.0"
"bundleVersion": "codeql-bundle-v2.23.2",
"cliVersion": "2.23.2",
"priorBundleVersion": "codeql-bundle-v2.23.1",
"priorCliVersion": "2.23.1"
}
@@ -5,12 +5,13 @@ import * as actionsCache from "@actions/cache";
import * as glob from "@actions/glob";

import { getTemporaryDirectory } from "./actions-util";
import { listActionsCaches } from "./api-client";
import { getTotalCacheSize } from "./caching-utils";
import { Config } from "./config-utils";
import { EnvVar } from "./environment";
import { KnownLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { getRequiredEnvParam } from "./util";
import { getErrorMessage, getRequiredEnvParam } from "./util";

/**
* Caching configuration for a particular language.
@@ -84,20 +85,42 @@ async function makeGlobber(patterns: string[]): Promise<glob.Globber> {
return glob.create(patterns.join("\n"));
}

/** Enumerates possible outcomes for cache hits. */
export enum CacheHitKind {
/** We were unable to calculate a hash for the key. */
NoHash = "no-hash",
/** No cache was found. */
Miss = "miss",
/** The primary cache key matched. */
Exact = "exact",
/** A restore key matched. */
Partial = "partial",
}

/** Represents results of trying to restore a dependency cache for a language. */
export interface DependencyCacheRestoreStatus {
language: Language;
hit_kind: CacheHitKind;
download_duration_ms?: number;
}

/** An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheRestoreStatusReport = DependencyCacheRestoreStatus[];

/**
* Attempts to restore dependency caches for the languages being analyzed.
*
* @param languages The languages being analyzed.
* @param logger A logger to record some informational messages to.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
* @returns A list of languages for which dependency caches were restored.
* @returns An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration.
*/
export async function downloadDependencyCaches(
languages: Language[],
logger: Logger,
minimizeJavaJars: boolean,
): Promise<Language[]> {
const restoredCaches: Language[] = [];
): Promise<DependencyCacheRestoreStatusReport> {
const status: DependencyCacheRestoreStatusReport = [];

for (const language of languages) {
const cacheConfig = getDefaultCacheConfig()[language];
@@ -114,6 +137,7 @@ export async function downloadDependencyCaches(
const globber = await makeGlobber(cacheConfig.hash);

if ((await globber.glob()).length === 0) {
status.push({ language, hit_kind: CacheHitKind.NoHash });
logger.info(
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
@@ -131,35 +155,66 @@ export async function downloadDependencyCaches(
)}`,
);

const start = performance.now();
const hitKey = await actionsCache.restoreCache(
cacheConfig.paths,
primaryKey,
restoreKeys,
);
const download_duration_ms = Math.round(performance.now() - start);

if (hitKey !== undefined) {
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
restoredCaches.push(language);
const hit_kind =
hitKey === primaryKey ? CacheHitKind.Exact : CacheHitKind.Partial;
status.push({ language, hit_kind, download_duration_ms });
} else {
status.push({ language, hit_kind: CacheHitKind.Miss });
logger.info(`No suitable cache found for ${language}.`);
}
}

return restoredCaches;
return status;
}

/** Enumerates possible outcomes for storing caches. */
export enum CacheStoreResult {
/** We were unable to calculate a hash for the key. */
NoHash = "no-hash",
/** There is nothing to store in the cache. */
Empty = "empty",
/** There already exists a cache with the key we are trying to store. */
Duplicate = "duplicate",
/** The cache was stored successfully. */
Stored = "stored",
}

/** Represents results of trying to upload a dependency cache for a language. */
export interface DependencyCacheUploadStatus {
language: Language;
result: CacheStoreResult;
upload_size_bytes?: number;
upload_duration_ms?: number;
}

/** An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheUploadStatusReport = DependencyCacheUploadStatus[];

/**
* Attempts to store caches for the languages that were analyzed.
*
* @param config The configuration for this workflow.
* @param logger A logger to record some informational messages to.
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
*
* @returns An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration.
*/
export async function uploadDependencyCaches(
config: Config,
logger: Logger,
minimizeJavaJars: boolean,
): Promise<void> {
): Promise<DependencyCacheUploadStatusReport> {
const status: DependencyCacheUploadStatusReport = [];
for (const language of config.languages) {
const cacheConfig = getDefaultCacheConfig()[language];

@@ -175,6 +230,7 @@ export async function uploadDependencyCaches(
const globber = await makeGlobber(cacheConfig.hash);

if ((await globber.glob()).length === 0) {
status.push({ language, result: CacheStoreResult.NoHash });
logger.info(
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
);
@@ -195,6 +251,7 @@ export async function uploadDependencyCaches(

// Skip uploading an empty cache.
if (size === 0) {
status.push({ language, result: CacheStoreResult.Empty });
logger.info(
`Skipping upload of dependency cache for ${language} since it is empty.`,
);
@@ -208,7 +265,16 @@ export async function uploadDependencyCaches(
);

try {
const start = performance.now();
await actionsCache.saveCache(cacheConfig.paths, key);
const upload_duration_ms = Math.round(performance.now() - start);

status.push({
language,
result: CacheStoreResult.Stored,
upload_size_bytes: Math.round(size),
upload_duration_ms,
});
} catch (error) {
// `ReserveCacheError` indicates that the cache key is already in use, which means that a
// cache with that key already exists or is in the process of being uploaded by another
@@ -218,12 +284,16 @@ export async function uploadDependencyCaches(
`Not uploading cache for ${language}, because ${key} is already in use.`,
);
logger.debug(error.message);

status.push({ language, result: CacheStoreResult.Duplicate });
} else {
// Propagate other errors upwards.
throw error;
}
}
}

return status;
}

/**
@@ -270,3 +340,34 @@ async function cachePrefix(

return `${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
}

/** Represents information about our overall cache usage for CodeQL dependency caches. */
export interface DependencyCachingUsageReport {
count: number;
size_bytes: number;
}

/**
* Tries to determine the overall cache usage for CodeQL dependencies caches.
*
* @param logger The logger to log errors to.
* @returns Returns the overall cache usage for CodeQL dependencies caches, or `undefined` if we couldn't determine it.
*/
export async function getDependencyCacheUsage(
logger: Logger,
): Promise<DependencyCachingUsageReport | undefined> {
try {
const caches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);
const totalSize = caches.reduce(
(acc, cache) => acc + (cache.size_in_bytes ?? 0),
0,
);
return { count: caches.length, size_bytes: totalSize };
} catch (err) {
logger.warning(
`Unable to retrieve information about dependency cache usage: ${getErrorMessage(err)}`,
);
}

return undefined;
}
@@ -128,4 +128,10 @@ export enum EnvVar {
* whether the upload is disabled. This is intended for testing and debugging purposes.
*/
SARIF_DUMP_DIR = "CODEQL_ACTION_SARIF_DUMP_DIR",

/**
* Whether to skip uploading SARIF results to GitHub. Intended for testing purposes.
* This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
*/
SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",
}
@@ -43,6 +43,7 @@ export interface FeatureEnablement {
* Legacy features should end with `_enabled`.
*/
export enum Feature {
AllowToolcacheInput = "allow_toolcache_input",
CleanupTrapCaches = "cleanup_trap_caches",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
DiffInformedQueries = "diff_informed_queries",
@@ -73,9 +74,9 @@ export enum Feature {
OverlayAnalysisRust = "overlay_analysis_rust",
OverlayAnalysisSwift = "overlay_analysis_swift",
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
UseRepositoryProperties = "use_repository_properties",
QaTelemetryEnabled = "qa_telemetry_enabled",
ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
UseRepositoryProperties = "use_repository_properties",
}

export const featureConfig: Record<
@@ -109,6 +110,11 @@ export const featureConfig: Record<
toolsFeature?: ToolsFeature;
}
> = {
[Feature.AllowToolcacheInput]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: undefined,
},
[Feature.CleanupTrapCaches]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -2,6 +2,7 @@ import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import * as codeql from "./codeql";
import * as configUtils from "./config-utils";
import { Feature } from "./feature-flags";
@@ -28,12 +29,13 @@ test("post: init action with debug mode off", async (t) => {
const gitHubVersion: util.GitHubVersion = {
type: util.GitHubVariant.DOTCOM,
};
sinon.stub(configUtils, "getConfig").resolves({
debugMode: false,
gitHubVersion,
languages: [],
packs: [],
} as unknown as configUtils.Config);
sinon.stub(configUtils, "getConfig").resolves(
createTestConfig({
debugMode: false,
gitHubVersion,
languages: [],
}),
);

const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
@@ -84,14 +86,14 @@ test("uploads failed SARIF run with `diagnostics export` if feature flag is off"
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
@@ -108,14 +110,14 @@ test("uploads failed SARIF run with `diagnostics export` if the database doesn't
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
@@ -135,14 +137,14 @@ test("uploads failed SARIF run with database export-diagnostics if the database
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
@@ -192,14 +194,14 @@ for (const { uploadInput, shouldUpload } of UPLOAD_INPUT_TEST_CASES) {
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
upload: uploadInput,
@@ -227,14 +229,14 @@ test("uploading failed SARIF run succeeds when workflow uses an input with a mat
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
category: "/language:${{ matrix.language }}",
},
@@ -254,14 +256,14 @@ test("uploading failed SARIF run fails when workflow uses a complex upload input
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v3",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v3",
uses: "github/codeql-action/analyze@v4",
with: {
upload: "${{ matrix.language != 'csharp' }}",
},
@@ -295,6 +297,17 @@ test("uploading failed SARIF run fails when workflow does not reference github/c
t.truthy(result.upload_failed_run_stack_trace);
});

test("not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", async (t) => {
const result = await testFailedSarifUpload(t, createTestWorkflow([]), {
analysisKinds: [AnalysisKind.CodeQuality],
expectUpload: false,
});
t.is(
result.upload_failed_run_skipped_because,
"Code Scanning is not enabled.",
);
});

function createTestWorkflow(
steps: workflow.WorkflowJobStep[],
): workflow.Workflow {
@@ -327,20 +340,22 @@ async function testFailedSarifUpload(
expectUpload = true,
exportDiagnosticsEnabled = false,
matrix = {},
analysisKinds = [AnalysisKind.CodeScanning],
}: {
category?: string;
databaseExists?: boolean;
expectUpload?: boolean;
exportDiagnosticsEnabled?: boolean;
matrix?: { [key: string]: string };
analysisKinds?: AnalysisKind[];
} = {},
): Promise<initActionPostHelper.UploadFailedSarifResult> {
const config = {
const config = createTestConfig({
analysisKinds,
codeQLCmd: "codeql",
debugMode: true,
languages: [],
packs: [],
} as unknown as configUtils.Config;
});
if (databaseExists) {
config.dbLocation = "path/to/database";
}
@@ -7,7 +7,8 @@ import * as actionsUtil from "./actions-util";
import { CodeScanning } from "./analyses";
import { getApiClient } from "./api-client";
import { CodeQL, getCodeQL } from "./codeql";
import { Config } from "./config-utils";
import { Config, isCodeScanningEnabled } from "./config-utils";
import * as dependencyCaching from "./dependency-caching";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
@@ -18,8 +19,8 @@ import {
delay,
getErrorMessage,
getRequiredEnvParam,
isInTestMode,
parseMatrixInput,
shouldSkipSarifUpload,
wrapError,
} from "./util";
import {
@@ -45,6 +46,10 @@ export interface JobStatusReport {
job_status: JobStatus;
}

export interface DependencyCachingUsageReport {
dependency_caching_usage?: dependencyCaching.DependencyCachingUsageReport;
}

function createFailedUploadFailedSarifResult(
error: unknown,
): UploadFailedSarifResult {
@@ -76,7 +81,7 @@ async function maybeUploadFailedSarif(
!["always", "failure-only"].includes(
actionsUtil.getUploadValue(shouldUpload),
) ||
isInTestMode()
shouldSkipSarifUpload()
) {
return { upload_failed_run_skipped_because: "SARIF upload is disabled" };
}
@@ -134,6 +139,15 @@ export async function tryUploadSarifIfRunFailed(
EnvVar.JOB_STATUS,
process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus,
);

// If the only enabled analysis kind is `code-quality`, then we shouldn't
// upload the failed SARIF to Code Scanning.
if (!isCodeScanningEnabled(config)) {
return {
upload_failed_run_skipped_because: "Code Scanning is not enabled.",
};
}

try {
return await maybeUploadFailedSarif(
config,
@@ -12,10 +12,16 @@ import {
printDebugLogs,
} from "./actions-util";
import { getGitHubVersion } from "./api-client";
import { CachingKind } from "./caching-utils";
import { getCodeQL } from "./codeql";
import { Config, getConfig } from "./config-utils";
import * as debugArtifacts from "./debug-artifacts";
import {
DependencyCachingUsageReport,
getDependencyCacheUsage,
} from "./dependency-caching";
import { Features } from "./feature-flags";
import * as gitUtils from "./git-utils";
import * as initActionPostHelper from "./init-action-post-helper";
import { getActionsLogger } from "./logging";
import { getRepositoryNwo } from "./repository";
@@ -32,7 +38,8 @@ import { checkDiskUsage, checkGitHubVersionInRange, wrapError } from "./util";
interface InitPostStatusReport
extends StatusReportBase,
initActionPostHelper.UploadFailedSarifResult,
initActionPostHelper.JobStatusReport {}
initActionPostHelper.JobStatusReport,
initActionPostHelper.DependencyCachingUsageReport {}

async function runWrapper() {
const logger = getActionsLogger();
@@ -41,6 +48,7 @@ async function runWrapper() {
let uploadFailedSarifResult:
| initActionPostHelper.UploadFailedSarifResult
| undefined;
let dependencyCachingUsage: DependencyCachingUsageReport | undefined;
try {
// Restore inputs from `init` Action.
restoreInputs();
@@ -73,6 +81,17 @@ async function runWrapper() {
features,
logger,
);

// If we are analysing the default branch and some kind of caching is enabled,
// then try to determine our overall cache usage for dependency caches. We only
// do this under these circumstances to avoid slowing down analyses for PRs
// and where caching may not be enabled.
if (
(await gitUtils.isAnalyzingDefaultBranch()) &&
config.dependencyCachingEnabled !== CachingKind.None
) {
dependencyCachingUsage = await getDependencyCacheUsage(logger);
}
}
} catch (unwrappedError) {
const error = wrapError(unwrappedError);
@@ -109,6 +128,7 @@ async function runWrapper() {
...statusReportBase,
...uploadFailedSarifResult,
job_status: initActionPostHelper.getFinalJobStatus(),
dependency_caching_usage: dependencyCachingUsage,
};
logger.info("Sending status report for init-post step.");
await sendStatusReport(statusReport);
@@ -23,7 +23,10 @@ import {
} from "./caching-utils";
import { CodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { downloadDependencyCaches } from "./dependency-caching";
import {
DependencyCacheRestoreStatusReport,
downloadDependencyCaches,
} from "./dependency-caching";
import {
addDiagnostic,
flushDiagnostics,
@@ -102,6 +105,7 @@ async function sendCompletedStatusReport(
toolsSource: ToolsSource,
toolsVersion: string,
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
logger: Logger,
error?: Error,
) {
@@ -151,6 +155,7 @@ async function sendCompletedStatusReport(
await getTotalCacheSize(Object.values(config.trapCaches), logger),
),
overlayBaseDatabaseStats,
dependencyCachingResults,
);
await sendStatusReport({
...initWithConfigStatusReport,
@@ -243,6 +248,7 @@ async function run() {
getTemporaryDirectory(),
gitHubVersion.type,
codeQLDefaultVersionInfo,
features,
logger,
);
codeql = initCodeQLResult.codeql;
@@ -351,6 +357,7 @@ async function run() {
}

let overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined;
let dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined;
try {
if (
config.overlayDatabaseMode === OverlayDatabaseMode.Overlay &&
@@ -562,7 +569,7 @@ async function run() {
codeql,
);
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
await downloadDependencyCaches(
dependencyCachingResults = await downloadDependencyCaches(
config.languages,
logger,
minimizeJavaJars,
@@ -714,6 +721,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger,
error,
);
@@ -736,6 +744,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
logger,
);
}
@@ -9,7 +9,7 @@ import { getOptionalInput, isSelfHostedRunner } from "./actions-util";
import { GitHubApiDetails } from "./api-client";
import { CodeQL, setupCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { CodeQLDefaultVersionInfo } from "./feature-flags";
import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { ToolsSource } from "./setup-codeql";
@@ -23,6 +23,7 @@ export async function initCodeQL(
tempDir: string,
variant: util.GitHubVariant,
defaultCliVersion: CodeQLDefaultVersionInfo,
features: FeatureEnablement,
logger: Logger,
): Promise<{
codeql: CodeQL;
@@ -44,6 +45,7 @@ export async function initCodeQL(
tempDir,
variant,
defaultCliVersion,
features,
logger,
true,
);
@@ -1,6 +1,7 @@
import * as path from "path";

import test from "ava";
import * as toolcache from "@actions/tool-cache";
import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
@@ -12,6 +13,7 @@ import {
LoggedMessage,
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
createFeatures,
getRecordingLogger,
initializeFeatures,
mockBundleDownloadApi,
@@ -90,6 +92,8 @@ test("getCodeQLActionRepository", (t) => {
});

test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) => {
const features = createFeatures([]);

await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const tagName = "codeql-bundle-v1.2.3";
@@ -100,6 +104,7 @@ test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) =>
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
getRunnerLogger(true),
);

@@ -109,6 +114,8 @@ test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) =>
});

test("getCodeQLSource correctly returns bundled CLI version when tools == linked", async (t) => {
const features = createFeatures([]);

await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const source = await setupCodeql.getCodeQLSource(
@@ -117,6 +124,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == linked
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
getRunnerLogger(true),
);

@@ -128,6 +136,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == linked
test("getCodeQLSource correctly returns bundled CLI version when tools == latest", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([]);

await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
@@ -137,6 +146,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == latest
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
logger,
);

@@ -161,6 +171,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == latest
test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use linked tools", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([]);

// Stub the downloadCodeQL function to prevent downloading artefacts
// during testing from being called.
@@ -185,6 +196,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use
"tmp/codeql_action_test/",
GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
logger,
);

@@ -207,6 +219,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use
test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to download a non-default bundle", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([]);

const bundleUrl =
"https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.16.0/codeql-bundle-linux64.tar.gz";
@@ -235,6 +248,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
"tmp/codeql_action_test/",
GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
logger,
);

@@ -254,6 +268,160 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
});
});

test("getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([Feature.AllowToolcacheInput]);

process.env["GITHUB_EVENT_NAME"] = "dynamic";

const latestToolcacheVersion = "3.2.1";
const latestVersionPath = "/path/to/latest";
const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"];
const findAllVersionsStub = sinon
.stub(toolcache, "findAllVersions")
.returns(testVersions);
const findStub = sinon.stub(toolcache, "find");
findStub
.withArgs("CodeQL", latestToolcacheVersion)
.returns(latestVersionPath);

await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
logger,
);

// Check that the toolcache functions were called with the expected arguments
t.assert(
findAllVersionsStub.calledOnceWith("CodeQL"),
`toolcache.findAllVersions("CodeQL") wasn't called`,
);
t.assert(
findStub.calledOnceWith("CodeQL", latestToolcacheVersion),
`toolcache.find("CodeQL", ${latestToolcacheVersion}) wasn't called`,
);

// Check that `sourceType` and `toolsVersion` match expectations.
t.is(source.sourceType, "toolcache");
t.is(source.toolsVersion, latestToolcacheVersion);

// Check that key messages we would expect to find in the log are present.
const expectedMessages: string[] = [
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`,
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`,
`Using CodeQL CLI version ${latestToolcacheVersion} from toolcache at ${latestVersionPath}`,
];
for (const expectedMessage of expectedMessages) {
t.assert(
loggedMessages.some(
(msg) =>
typeof msg.message === "string" &&
msg.message.includes(expectedMessage),
),
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`,
);
}
});
});

const toolcacheInputFallbackMacro = test.macro({
exec: async (
t: ExecutionContext<unknown>,
featureList: Feature[],
environment: Record<string, string>,
testVersions: string[],
expectedMessages: string[],
) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures(featureList);

for (const [k, v] of Object.entries(environment)) {
process.env[k] = v;
}

const findAllVersionsStub = sinon
.stub(toolcache, "findAllVersions")
.returns(testVersions);

await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
logger,
);

// Check that the toolcache functions were called with the expected arguments
t.assert(
findAllVersionsStub.calledWith("CodeQL"),
`toolcache.findAllVersions("CodeQL") wasn't called`,
);

// Check that `sourceType` and `toolsVersion` match expectations.
t.is(source.sourceType, "download");
t.is(source.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);

// Check that key messages we would expect to find in the log are present.
for (const expectedMessage of expectedMessages) {
t.assert(
loggedMessages.some(
(msg) =>
typeof msg.message === "string" &&
msg.message.includes(expectedMessage),
),
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`,
);
}
});
},
title: (providedTitle = "") =>
`getCodeQLSource falls back to downloading the CLI if ${providedTitle}`,
});

test(
"the toolcache doesn't have a CodeQL CLI when tools == toolcache",
toolcacheInputFallbackMacro,
[Feature.AllowToolcacheInput],
{ GITHUB_EVENT_NAME: "dynamic" },
[],
[
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`,
`Found no CodeQL CLI in the toolcache, ignoring 'tools: toolcache'...`,
],
);

test(
"the workflow trigger is not `dynamic`",
toolcacheInputFallbackMacro,
[Feature.AllowToolcacheInput],
{ GITHUB_EVENT_NAME: "pull_request" },
[],
[
`Ignoring 'tools: toolcache' because the workflow was not triggered dynamically.`,
],
);

test(
"the feature flag is not enabled",
toolcacheInputFallbackMacro,
[],
{ GITHUB_EVENT_NAME: "dynamic" },
[],
[`Ignoring 'tools: toolcache' because the feature is not enabled.`],
);

test('tryGetTagNameFromUrl extracts the right tag name for a repo name containing "codeql-bundle"', (t) => {
t.is(
setupCodeql.tryGetTagNameFromUrl(
@@ -263,3 +431,15 @@ test('tryGetTagNameFromUrl extracts the right tag name for a repo name containin
"codeql-bundle-v2.19.0",
);
});

test("getLatestToolcacheVersion returns undefined if there are no CodeQL CLIs in the toolcache", (t) => {
sinon.stub(toolcache, "findAllVersions").returns([]);
t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), undefined);
});

test("getLatestToolcacheVersion returns latest version in the toolcache", (t) => {
const testVersions = ["2.3.1", "3.2.1", "1.2.3"];
sinon.stub(toolcache, "findAllVersions").returns(testVersions);

t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), "3.2.1");
});
@@ -7,12 +7,14 @@ import { default as deepEqual } from "fast-deep-equal";
import * as semver from "semver";
import { v4 as uuidV4 } from "uuid";

import { isRunningLocalAction } from "./actions-util";
import { isDynamicWorkflow, isRunningLocalAction } from "./actions-util";
import * as api from "./api-client";
import * as defaults from "./defaults.json";
import {
CODEQL_VERSION_ZSTD_BUNDLE,
CodeQLDefaultVersionInfo,
Feature,
FeatureEnablement,
} from "./feature-flags";
import { Logger } from "./logging";
import * as tar from "./tar";
@@ -38,6 +40,7 @@ const CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";

const CODEQL_BUNDLE_VERSION_ALIAS: string[] = ["linked", "latest"];
const CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
const CODEQL_TOOLCACHE_INPUT = "toolcache";

function getCodeQLBundleExtension(
compressionMethod: tar.CompressionMethod,
@@ -275,6 +278,7 @@ export async function getCodeQLSource(
apiDetails: api.GitHubApiDetails,
variant: util.GitHubVariant,
tarSupportsZstd: boolean,
features: FeatureEnablement,
logger: Logger,
): Promise<CodeQLToolsSource> {
if (
@@ -346,6 +350,54 @@ export async function getCodeQLSource(
"`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required.",
);
}
} else if (
toolsInput !== undefined &&
toolsInput === CODEQL_TOOLCACHE_INPUT
) {
let latestToolcacheVersion: string | undefined;

// We only allow `toolsInput === "toolcache"` for `dynamic` events. In general, using `toolsInput === "toolcache"`
// can lead to alert wobble and so it shouldn't be used for an analysis where results are intended to be uploaded.
// We also allow this in test mode.
const allowToolcacheValueFF = await features.getValue(
Feature.AllowToolcacheInput,
);
const allowToolcacheValue =
allowToolcacheValueFF && (isDynamicWorkflow() || util.isInTestMode());
if (allowToolcacheValue) {
// If `toolsInput === "toolcache"`, try to find the latest version of the CLI that's available in the toolcache
// and use that. We perform this check here since we can set `cliVersion` directly and don't want to default to
// the linked version.
logger.info(
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.`,
);

latestToolcacheVersion = getLatestToolcacheVersion(logger);
if (latestToolcacheVersion) {
cliVersion = latestToolcacheVersion;
}
}

if (latestToolcacheVersion === undefined) {
if (allowToolcacheValue) {
logger.info(
`Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...`,
);
} else {
if (allowToolcacheValueFF) {
logger.warning(
`Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.`,
);
} else {
logger.info(
`Ignoring 'tools: ${toolsInput}' because the feature is not enabled.`,
);
}
}

cliVersion = defaultCliVersion.cliVersion;
tagName = defaultCliVersion.tagName;
}
} else if (toolsInput !== undefined) {
// If a tools URL was provided, then use that.
tagName = tryGetTagNameFromUrl(toolsInput, logger);
@@ -696,6 +748,7 @@ export async function setupCodeQLBundle(
tempDir: string,
variant: util.GitHubVariant,
defaultCliVersion: CodeQLDefaultVersionInfo,
features: FeatureEnablement,
logger: Logger,
) {
if (!(await util.isBinaryAccessible("tar", logger))) {
@@ -711,6 +764,7 @@ export async function setupCodeQLBundle(
apiDetails,
variant,
zstdAvailability.available,
features,
logger,
);

@@ -816,9 +870,38 @@ async function getNightlyToolsUrl(logger: Logger) {
}
}

/**
* Gets the latest version of the CodeQL CLI that is available in the toolcache, or `undefined`
* if no CodeQL CLI is available in the toolcache.
*
* @param logger The logger to use.
* @returns The latest version of the CodeQL CLI that is available in the toolcache, or `undefined` if there is none.
*/
export function getLatestToolcacheVersion(logger: Logger): string | undefined {
const allVersions = toolcache
.findAllVersions("CodeQL")
.sort((a, b) => semver.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions,
)}.`,
);

if (allVersions.length > 0) {
const latestToolcacheVersion = allVersions[0];
logger.info(
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`,
);
return latestToolcacheVersion;
}

return undefined;
}

function isReservedToolsValue(tools: string): boolean {
return (
CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) ||
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools)
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) ||
tools === CODEQL_TOOLCACHE_INPUT
);
}
@@ -7,13 +7,23 @@ import { pki } from "node-forge";

import * as actionsUtil from "./actions-util";
import { getApiDetails, getAuthorizationHeaderFor } from "./api-client";
import { Config } from "./config-utils";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import {
Credential,
getCredentials,
getDownloadUrl,
parseLanguage,
UPDATEJOB_PROXY,
} from "./start-proxy";
import {
ActionName,
createStatusReportBase,
getActionsStatus,
sendStatusReport,
StatusReportBase,
} from "./status-report";
import * as util from "./util";

const KEY_SIZE = 2048;
@@ -83,46 +93,111 @@ function generateCertificateAuthority(): CertificateAuthority {
return { cert: pem, key };
}

interface StartProxyStatus extends StatusReportBase {
// A comma-separated list of registry types which are configured for CodeQL.
// This only includes registry types we support, not all that are configured.
registry_types: string;
}

async function sendSuccessStatusReport(
startedAt: Date,
config: Partial<Config>,
registry_types: string[],
logger: Logger,
) {
const statusReportBase = await createStatusReportBase(
ActionName.StartProxy,
"success",
startedAt,
config,
await util.checkDiskUsage(logger),
logger,
);
if (statusReportBase !== undefined) {
const statusReport: StartProxyStatus = {
...statusReportBase,
registry_types: registry_types.join(","),
};
await sendStatusReport(statusReport);
}
}

async function runWrapper() {
const startedAt = new Date();

// Make inputs accessible in the `post` step.
actionsUtil.persistInputs();

const logger = getActionsLogger();
let language: KnownLanguage | undefined;

// Setup logging for the proxy
const tempDir = actionsUtil.getTemporaryDirectory();
const proxyLogFilePath = path.resolve(tempDir, "proxy.log");
core.saveState("proxy-log-file", proxyLogFilePath);
try {
// Setup logging for the proxy
const tempDir = actionsUtil.getTemporaryDirectory();
const proxyLogFilePath = path.resolve(tempDir, "proxy.log");
core.saveState("proxy-log-file", proxyLogFilePath);

// Get the configuration options
const credentials = getCredentials(
logger,
actionsUtil.getOptionalInput("registry_secrets"),
actionsUtil.getOptionalInput("registries_credentials"),
actionsUtil.getOptionalInput("language"),
);
// Get the configuration options
const languageInput = actionsUtil.getOptionalInput("language");
language = languageInput ? parseLanguage(languageInput) : undefined;
const credentials = getCredentials(
logger,
actionsUtil.getOptionalInput("registry_secrets"),
actionsUtil.getOptionalInput("registries_credentials"),
language,
);

if (credentials.length === 0) {
logger.info("No credentials found, skipping proxy setup.");
return;
if (credentials.length === 0) {
logger.info("No credentials found, skipping proxy setup.");
return;
}

logger.info(
`Credentials loaded for the following registries:\n ${credentials
.map((c) => credentialToStr(c))
.join("\n")}`,
);

const ca = generateCertificateAuthority();

const proxyConfig: ProxyConfig = {
all_credentials: credentials,
ca,
};

// Start the Proxy
const proxyBin = await getProxyBinaryPath(logger);
await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);

// Report success if we have reached this point.
await sendSuccessStatusReport(
startedAt,
{
languages: language && [language],
},
proxyConfig.all_credentials.map((c) => c.type),
logger,
);
} catch (unwrappedError) {
const error = util.wrapError(unwrappedError);
core.setFailed(`start-proxy action failed: ${error.message}`);

// We skip sending the error message and stack trace here to avoid the possibility
// of leaking any sensitive information into the telemetry.
const errorStatusReportBase = await createStatusReportBase(
ActionName.StartProxy,
getActionsStatus(error),
startedAt,
{
languages: language && [language],
},
await util.checkDiskUsage(logger),
logger,
);
if (errorStatusReportBase !== undefined) {
await sendStatusReport(errorStatusReportBase);
}
}

logger.info(
`Credentials loaded for the following registries:\n ${credentials
.map((c) => credentialToStr(c))
.join("\n")}`,
);

const ca = generateCertificateAuthority();

const proxyConfig: ProxyConfig = {
all_credentials: credentials,
ca,
};

// Start the Proxy
const proxyBin = await getProxyBinaryPath(logger);
await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);
}

async function startProxy(
@@ -133,57 +208,53 @@ async function startProxy(
) {
const host = "127.0.0.1";
let port = 49152;
try {
let subprocess: ChildProcess | undefined = undefined;
let tries = 5;
let subprocessError: Error | undefined = undefined;
while (tries-- > 0 && !subprocess && !subprocessError) {
subprocess = spawn(
binPath,
["-addr", `${host}:${port}`, "-config", "-", "-logfile", logFilePath],
{
detached: true,
stdio: ["pipe", "ignore", "ignore"],
},
);
subprocess.unref();
if (subprocess.pid) {
core.saveState("proxy-process-pid", `${subprocess.pid}`);
let subprocess: ChildProcess | undefined = undefined;
let tries = 5;
let subprocessError: Error | undefined = undefined;
while (tries-- > 0 && !subprocess && !subprocessError) {
subprocess = spawn(
binPath,
["-addr", `${host}:${port}`, "-config", "-", "-logfile", logFilePath],
{
detached: true,
stdio: ["pipe", "ignore", "ignore"],
},
);
subprocess.unref();
if (subprocess.pid) {
core.saveState("proxy-process-pid", `${subprocess.pid}`);
}
subprocess.on("error", (error) => {
subprocessError = error;
});
subprocess.on("exit", (code) => {
if (code !== 0) {
// If the proxy failed to start, try a different port from the ephemeral range [49152, 65535]
port = Math.floor(Math.random() * (65535 - 49152) + 49152);
subprocess = undefined;
}
subprocess.on("error", (error) => {
subprocessError = error;
});
subprocess.on("exit", (code) => {
if (code !== 0) {
// If the proxy failed to start, try a different port from the ephemeral range [49152, 65535]
port = Math.floor(Math.random() * (65535 - 49152) + 49152);
subprocess = undefined;
}
});
subprocess.stdin?.write(JSON.stringify(config));
subprocess.stdin?.end();
// Wait a little to allow the proxy to start
await util.delay(1000);
}
if (subprocessError) {
// eslint-disable-next-line @typescript-eslint/only-throw-error
throw subprocessError;
}
logger.info(`Proxy started on ${host}:${port}`);
core.setOutput("proxy_host", host);
core.setOutput("proxy_port", port.toString());
core.setOutput("proxy_ca_certificate", config.ca.cert);

const registry_urls = config.all_credentials
.filter((credential) => credential.url !== undefined)
.map((credential) => ({
type: credential.type,
url: credential.url,
}));
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
} catch (error) {
core.setFailed(`start-proxy action failed: ${util.getErrorMessage(error)}`);
});
subprocess.stdin?.write(JSON.stringify(config));
subprocess.stdin?.end();
// Wait a little to allow the proxy to start
await util.delay(1000);
}
if (subprocessError) {
// eslint-disable-next-line @typescript-eslint/only-throw-error
throw subprocessError;
}
logger.info(`Proxy started on ${host}:${port}`);
core.setOutput("proxy_host", host);
core.setOutput("proxy_port", port.toString());
core.setOutput("proxy_ca_certificate", config.ca.cert);

const registry_urls = config.all_credentials
.filter((credential) => credential.url !== undefined)
.map((credential) => ({
type: credential.type,
url: credential.url,
}));
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
}

async function getProxyBinaryPath(logger: Logger): Promise<string> {
@@ -109,7 +109,7 @@ test("getCredentials filters by language when specified", async (t) => {
getRunnerLogger(true),
undefined,
toEncodedJSON(mixedCredentials),
"java",
KnownLanguage.java,
);
t.is(credentials.length, 1);
t.is(credentials[0].type, "maven_repository");
@@ -120,7 +120,7 @@ test("getCredentials returns all for a language when specified", async (t) => {
getRunnerLogger(true),
undefined,
toEncodedJSON(mixedCredentials),
"go",
KnownLanguage.go,
);
t.is(credentials.length, 2);
@@ -79,9 +79,8 @@ export function getCredentials(
logger: Logger,
registrySecrets: string | undefined,
registriesCredentials: string | undefined,
languageString: string | undefined,
language: KnownLanguage | undefined,
): Credential[] {
const language = languageString ? parseLanguage(languageString) : undefined;
const registryTypeForLanguage = language
? LANGUAGE_TO_REGISTRY_TYPE[language]
: undefined;
@@ -92,6 +92,49 @@ test("createStatusReportBase", async (t) => {
});
});

test("createStatusReportBase - empty configuration", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupEnvironmentAndStub(tmpDir);

const statusReport = await createStatusReportBase(
ActionName.StartProxy,
"success",
new Date("May 19, 2023 05:19:00"),
{},
{ numAvailableBytes: 100, numTotalBytes: 500 },
getRunnerLogger(false),
);

if (t.truthy(statusReport)) {
t.is(statusReport.action_name, ActionName.StartProxy);
t.is(statusReport.status, "success");
}
});
});

test("createStatusReportBase - partial configuration", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupEnvironmentAndStub(tmpDir);

const statusReport = await createStatusReportBase(
ActionName.StartProxy,
"success",
new Date("May 19, 2023 05:19:00"),
{
languages: ["go"],
},
{ numAvailableBytes: 100, numTotalBytes: 500 },
getRunnerLogger(false),
);

if (t.truthy(statusReport)) {
t.is(statusReport.action_name, ActionName.StartProxy);
t.is(statusReport.status, "success");
t.is(statusReport.languages, "go");
}
});
});

test("createStatusReportBase_firstParty", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupEnvironmentAndStub(tmpDir);
@@ -286,6 +329,7 @@ const testCreateInitWithConfigStatusReport = test.macro({
undefined,
1024,
undefined,
undefined,
);

if (t.truthy(initWithConfigStatusReport)) {
@@ -13,6 +13,7 @@ import {
|
||||
} from "./actions-util";
|
||||
import { getAnalysisKey, getApiClient } from "./api-client";
|
||||
import { parseRegistriesWithoutCredentials, type Config } from "./config-utils";
|
||||
import { DependencyCacheRestoreStatusReport } from "./dependency-caching";
|
||||
import { DocUrl } from "./doc-url";
|
||||
import { EnvVar } from "./environment";
|
||||
import { getRef } from "./git-utils";
|
||||
@@ -35,11 +36,12 @@ import {
|
||||
} from "./util";
|
||||
|
||||
export enum ActionName {
|
||||
Autobuild = "autobuild",
|
||||
Analyze = "finish",
|
||||
Autobuild = "autobuild",
|
||||
Init = "init",
|
||||
InitPost = "init-post",
|
||||
ResolveEnvironment = "resolve-environment",
|
||||
StartProxy = "start-proxy",
|
||||
UploadSarif = "upload-sarif",
|
||||
}
|
||||
|
||||
@@ -258,7 +260,7 @@ export async function createStatusReportBase(
|
||||
actionName: ActionName,
|
||||
status: ActionStatus,
|
||||
actionStartedAt: Date,
|
||||
config: Config | undefined,
|
||||
config: Partial<Config> | undefined,
|
||||
diskInfo: DiskUsage | undefined,
|
||||
logger: Logger,
|
||||
cause?: string,
|
||||
@@ -297,7 +299,7 @@ export async function createStatusReportBase(
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -322,7 +324,7 @@ export async function createStatusReportBase(
|
||||
}
|
||||
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
|
||||
if (diskInfo) {
|
||||
@@ -373,6 +375,12 @@ export async function createStatusReportBase(
|
||||
logger.warning(
|
||||
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`,
|
||||
);
|
||||
|
||||
// Re-throw the exception in test mode. While testing, we want to know if something goes wrong here.
|
||||
if (isInTestMode()) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
@@ -497,6 +505,8 @@ export interface InitWithConfigStatusReport extends InitStatusReport {
|
||||
overlay_base_database_download_size_bytes?: number;
|
||||
/** Time taken to download the overlay-base database, in milliseconds. */
|
||||
overlay_base_database_download_duration_ms?: number;
|
||||
/** Stringified JSON object representing information about the results of restoring dependency caches. */
|
||||
dependency_caching_restore_results?: DependencyCacheRestoreStatusReport;
|
||||
/** Stringified JSON array of registry configuration objects, from the 'registries' config field
|
||||
or workflow input. **/
|
||||
registries: string;
|
||||
@@ -522,6 +532,7 @@ export async function createInitWithConfigStatusReport(
|
||||
configFile: string | undefined,
|
||||
totalCacheSize: number,
|
||||
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
|
||||
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
|
||||
): Promise<InitWithConfigStatusReport> {
|
||||
const languages = config.languages.join(",");
|
||||
const paths = (config.originalUserInput.paths || []).join(",");
|
||||
@@ -570,6 +581,7 @@ export async function createInitWithConfigStatusReport(
|
||||
overlayBaseDatabaseStats?.databaseSizeBytes,
|
||||
overlay_base_database_download_duration_ms:
|
||||
overlayBaseDatabaseStats?.databaseDownloadDurationMs,
|
||||
dependency_caching_restore_results: dependencyCachingResults,
|
||||
query_filters: JSON.stringify(
|
||||
config.originalUserInput["query-filters"] ?? [],
|
||||
),
|
||||
|
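The createStatusReportBase change above loosens the config parameter to Partial<Config> and adds optional chaining (config?.analysisKinds?.join(","), config.languages?.join(",")) so a status report can still be assembled when only part of the configuration is available. An illustrative sketch, not taken from the diff, with a deliberately simplified Config shape:

// Hypothetical, simplified shapes for illustration only.
interface Config {
  analysisKinds: string[];
  languages: string[];
}

function analysisKindsField(config: Partial<Config> | undefined): string | undefined {
  // With a Partial<Config>, `analysisKinds` may be missing even when `config` is defined,
  // so both the object and the field need optional chaining.
  return config?.analysisKinds?.join(",");
}

analysisKindsField(undefined);                             // undefined
analysisKindsField({ languages: ["javascript"] });         // undefined, no TypeError
analysisKindsField({ analysisKinds: ["code-scanning"] });  // "code-scanning"
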
@@ -1,9 +1,14 @@
import * as fs from "fs";
import * as path from "path";

import * as github from "@actions/github";
import { HTTPError } from "@actions/tool-cache";
import test from "ava";
import * as sinon from "sinon";

import { CodeQuality, CodeScanning } from "./analyses";
import * as analyses from "./analyses";
import { AnalysisKind, CodeQuality, CodeScanning } from "./analyses";
import * as api from "./api-client";
import { getRunnerLogger, Logger } from "./logging";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
@@ -127,27 +132,97 @@ test("finding SARIF files", async (t) => {
fs.writeFileSync(path.join(tmpDir, "a.quality.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "dir1", "b.quality.sarif"), "");

const expectedSarifFiles = [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
];
const sarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
CodeScanning.sarifPredicate,
);

t.deepEqual(sarifFiles, [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
]);
t.deepEqual(sarifFiles, expectedSarifFiles);

const expectedQualitySarifFiles = [
path.join(tmpDir, "a.quality.sarif"),
path.join(tmpDir, "dir1", "b.quality.sarif"),
];
const qualitySarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
CodeQuality.sarifPredicate,
);

t.deepEqual(qualitySarifFiles, [
path.join(tmpDir, "a.quality.sarif"),
path.join(tmpDir, "dir1", "b.quality.sarif"),
]);
t.deepEqual(qualitySarifFiles, expectedQualitySarifFiles);

const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
tmpDir,
);

t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeScanning],
expectedSarifFiles,
);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeQuality],
expectedQualitySarifFiles,
);
});
});

test("getGroupedSarifFilePaths - Code Quality file", async (t) => {
await withTmpDir(async (tmpDir) => {
const sarifPath = path.join(tmpDir, "a.quality.sarif");
fs.writeFileSync(sarifPath, "");

const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
sarifPath,
);

t.not(groupedSarifFiles, undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeQuality], [sarifPath]);
});
});

test("getGroupedSarifFilePaths - Code Scanning file", async (t) => {
await withTmpDir(async (tmpDir) => {
const sarifPath = path.join(tmpDir, "a.sarif");
fs.writeFileSync(sarifPath, "");

const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
sarifPath,
);

t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
});
});

test("getGroupedSarifFilePaths - Other file", async (t) => {
await withTmpDir(async (tmpDir) => {
const sarifPath = path.join(tmpDir, "a.json");
fs.writeFileSync(sarifPath, "");

const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
sarifPath,
);

t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
});
});

@@ -797,3 +872,91 @@ function createMockSarif(id?: string, tool?: string) {
],
};
}

function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) {
const mockData = {
payload: { sarif: "base64data", commit_sha: "abc123" },
owner: "test-owner",
repo: "test-repo",
response: {
status: 200,
data: { id: "uploaded-sarif-id" },
headers: {},
url: analysis.target,
},
};
const client = github.getOctokit("123");
sinon.stub(api, "getApiClient").value(() => client);
const requestStub = sinon.stub(client, "request");

const upload = async () =>
uploadLib.uploadPayload(
mockData.payload,
{
owner: mockData.owner,
repo: mockData.repo,
},
getRunnerLogger(true),
analysis,
);

return {
upload,
requestStub,
mockData,
};
}

for (const analysis of [CodeScanning, CodeQuality]) {
test(`uploadPayload on ${analysis.name} uploads successfully`, async (t) => {
const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis);
requestStub
.withArgs(analysis.target, {
owner: mockData.owner,
repo: mockData.repo,
data: mockData.payload,
})
.onFirstCall()
.returns(Promise.resolve(mockData.response));
const result = await upload();
t.is(result, mockData.response.data.id);
t.true(requestStub.calledOnce);
});

for (const envVar of [
"CODEQL_ACTION_SKIP_SARIF_UPLOAD",
"CODEQL_ACTION_TEST_MODE",
]) {
test(`uploadPayload on ${analysis.name} skips upload when ${envVar} is set`, async (t) => {
const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis);
await withTmpDir(async (tmpDir) => {
process.env.RUNNER_TEMP = tmpDir;
process.env[envVar] = "true";
const result = await upload();
t.is(result, "dummy-sarif-id");
t.false(requestStub.called);

const payloadFile = path.join(tmpDir, `payload-${analysis.kind}.json`);
t.true(fs.existsSync(payloadFile));

const savedPayload = JSON.parse(fs.readFileSync(payloadFile, "utf8"));
t.deepEqual(savedPayload, mockData.payload);
});
});
}

test(`uploadPayload on ${analysis.name} wraps request errors using wrapApiConfigurationError`, async (t) => {
const { upload, requestStub } = uploadPayloadFixtures(analysis);
const wrapApiConfigurationErrorStub = sinon.stub(
api,
"wrapApiConfigurationError",
);
const originalError = new HTTPError(404);
const wrappedError = new Error("Wrapped error message");
requestStub.rejects(originalError);
wrapApiConfigurationErrorStub.withArgs(originalError).returns(wrappedError);
await t.throwsAsync(upload, {
is: wrappedError,
});
});
}
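The uploadPayloadFixtures helper above works by replacing the module-level getApiClient accessor with a fixed Octokit client and then stubbing that client's request method, so each test can script the API response it wants. A minimal sketch of the same sinon pattern, with the module name assumed for illustration:

import * as github from "@actions/github";
import * as sinon from "sinon";
import * as api from "./api-client"; // assumed module exposing a getApiClient() accessor

// Replace the accessor so production code receives our controllable client.
const client = github.getOctokit("123");
sinon.stub(api, "getApiClient").value(() => client);

// Script the next API call; tests can later assert how it was invoked.
const requestStub = sinon.stub(client, "request");
requestStub
  .onFirstCall()
  .resolves({ status: 200, data: { id: "uploaded-sarif-id" } } as any);
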
@@ -262,6 +262,7 @@ async function combineSarifFilesUsingCLI(
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
features,
logger,
);

@@ -346,34 +347,36 @@ function getAutomationID(
return api.computeAutomationID(analysis_key, environment);
}

// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(
/**
* Upload the given payload.
* If the request fails then this will retry a small number of times.
* This is exported for testing purposes only.
*/
export async function uploadPayload(
payload: any,
repositoryNwo: RepositoryNwo,
logger: Logger,
target: analyses.SARIF_UPLOAD_ENDPOINT,
analysis: analyses.AnalysisConfig,
): Promise<string> {
logger.info("Uploading results");

// If in test mode we don't want to upload the results
if (util.isInTestMode()) {
if (util.shouldSkipSarifUpload()) {
const payloadSaveFile = path.join(
actionsUtil.getTemporaryDirectory(),
"payload.json",
`payload-${analysis.kind}.json`,
);
logger.info(
`In test mode. Results are not uploaded. Saving to ${payloadSaveFile}`,
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`,
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "test-mode-sarif-id";
return "dummy-sarif-id";
}

const client = api.getApiClient();

try {
const response = await client.request(target, {
const response = await client.request(analysis.target, {
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
data: payload,
@@ -459,6 +462,79 @@ export function getSarifFilePaths(
return sarifFiles;
}

type GroupedSarifFiles = Partial<Record<analyses.AnalysisKind, string[]>>;

/**
* Finds SARIF files in `sarifPath`, and groups them by analysis kind, following `SarifScanOrder`.
*
* @param logger The logger to use.
* @param sarifPath The path of a file or directory to recursively scan for SARIF files.
* @returns The `.sarif` files found in `sarifPath`, grouped by analysis kind.
*/
export async function getGroupedSarifFilePaths(
logger: Logger,
sarifPath: string,
): Promise<GroupedSarifFiles> {
const stats = fs.statSync(sarifPath, { throwIfNoEntry: false });

if (stats === undefined) {
// This is always a configuration error, even for first-party runs.
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}

const results: GroupedSarifFiles = {};

if (stats.isDirectory()) {
let unassignedSarifFiles = findSarifFilesInDir(
sarifPath,
(name) => path.extname(name) === ".sarif",
);
logger.debug(
`Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`,
);

for (const analysisConfig of analyses.SarifScanOrder) {
const filesForCurrentAnalysis = unassignedSarifFiles.filter(
analysisConfig.sarifPredicate,
);
if (filesForCurrentAnalysis.length > 0) {
logger.debug(
`The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`,
);
// Looping through the array a second time is not efficient, but more readable.
// Change this to one loop for both calls to `filter` if this becomes a bottleneck.
unassignedSarifFiles = unassignedSarifFiles.filter(
(name) => !analysisConfig.sarifPredicate(name),
);
results[analysisConfig.kind] = filesForCurrentAnalysis;
} else {
logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
}
}

if (unassignedSarifFiles.length !== 0) {
logger.warning(
`Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`,
);
}
} else {
for (const analysisConfig of analyses.SarifScanOrder) {
if (
analysisConfig.kind === analyses.AnalysisKind.CodeScanning ||
analysisConfig.sarifPredicate(sarifPath)
) {
logger.debug(
`Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`,
);
results[analysisConfig.kind] = [sarifPath];
break;
}
}
}

return results;
}

// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif: string): number {
let numResults = 0;
@@ -655,6 +731,7 @@ export async function uploadSpecifiedFiles(
const gitHubVersion = await getGitHubVersion();

let sarif: SarifFile;
category = uploadTarget.fixCategory(logger, category);

if (sarifPaths.length > 1) {
// Validate that the files we were asked to upload are all valid SARIF files
@@ -733,7 +810,7 @@ export async function uploadSpecifiedFiles(
payload,
getRepositoryNwo(),
logger,
uploadTarget.target,
uploadTarget,
);

logger.endGroup();
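As the doc comment above describes, getGroupedSarifFilePaths scans a directory once for all .sarif files and then assigns each file to the first analysis in SarifScanOrder whose sarifPredicate matches, warning about any leftovers. A hedged sketch of that partitioning idea with hard-coded stand-in predicates (the real predicates and scan order live in analyses.ts and may differ):

// Illustrative only: stand-ins for the predicates defined in analyses.ts.
const codeQualityPredicate = (name: string) => name.endsWith(".quality.sarif");
const codeScanningPredicate = (name: string) =>
  name.endsWith(".sarif") && !codeQualityPredicate(name);

function groupByKind(files: string[]): Record<string, string[]> {
  const groups: Record<string, string[]> = {};
  let unassigned = files;
  for (const [kind, predicate] of [
    ["code-quality", codeQualityPredicate],
    ["code-scanning", codeScanningPredicate],
  ] as const) {
    const matched = unassigned.filter(predicate);
    if (matched.length > 0) {
      groups[kind] = matched;
      // Remove assigned files so later analyses cannot claim them again.
      unassigned = unassigned.filter((f) => !predicate(f));
    }
  }
  return groups;
}

groupByKind(["a.sarif", "dir1/b.quality.sarif", "notes.txt"]);
// => { "code-quality": ["dir1/b.quality.sarif"], "code-scanning": ["a.sarif"] }
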
@@ -1,5 +1,3 @@
import * as fs from "fs";

import * as core from "@actions/core";

import * as actionsUtil from "./actions-util";
@@ -18,13 +16,14 @@ import {
isThirdPartyAnalysis,
} from "./status-report";
import * as upload_lib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import {
ConfigurationError,
checkActionVersion,
checkDiskUsage,
getErrorMessage,
initializeEnvironment,
isInTestMode,
shouldSkipSarifUpload,
wrapError,
} from "./util";

@@ -32,60 +31,6 @@ interface UploadSarifStatusReport
extends StatusReportBase,
upload_lib.UploadStatusReport {}

/**
* Searches for SARIF files for the given `analysis` in the given `sarifPath`.
* If any are found, then they are uploaded to the appropriate endpoint for the given `analysis`.
*
* @param logger The logger to use.
* @param features Information about FFs.
* @param sarifPath The path to a SARIF file or directory containing SARIF files.
* @param pathStats Information about `sarifPath`.
* @param checkoutPath The checkout path.
* @param analysis The configuration of the analysis we should upload SARIF files for.
* @param category The SARIF category to use for the upload.
* @returns The result of uploading the SARIF file(s) or `undefined` if there are none.
*/
async function findAndUpload(
logger: Logger,
features: Features,
sarifPath: string,
pathStats: fs.Stats,
checkoutPath: string,
analysis: analyses.AnalysisConfig,
category?: string,
): Promise<upload_lib.UploadResult | undefined> {
let sarifFiles: string[] | undefined;

if (pathStats.isDirectory()) {
sarifFiles = upload_lib.findSarifFilesInDir(
sarifPath,
analysis.sarifPredicate,
);
} else if (
pathStats.isFile() &&
(analysis.sarifPredicate(sarifPath) ||
(analysis.kind === analyses.AnalysisKind.CodeScanning &&
!analyses.CodeQuality.sarifPredicate(sarifPath)))
) {
sarifFiles = [sarifPath];
} else {
return undefined;
}

if (sarifFiles.length !== 0) {
return await upload_lib.uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
logger,
analysis,
);
}

return undefined;
}

async function sendSuccessStatusReport(
startedAt: Date,
uploadStats: upload_lib.UploadStatusReport,
@@ -144,56 +89,39 @@ async function run() {
const sarifPath = actionsUtil.getRequiredInput("sarif_file");
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
const pathStats = fs.lstatSync(sarifPath, { throwIfNoEntry: false });

if (pathStats === undefined) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
}

const sarifIds: Array<{ analysis: string; id: string }> = [];
const uploadResult = await findAndUpload(
const uploadResults = await uploadSarif(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
analyses.CodeScanning,
sarifPath,
category,
);
if (uploadResult !== undefined) {
core.setOutput("sarif-id", uploadResult.sarifID);
sarifIds.push({
analysis: analyses.AnalysisKind.CodeScanning,
id: uploadResult.sarifID,
});

// Fail if we didn't upload anything.
if (Object.keys(uploadResults).length === 0) {
throw new ConfigurationError(
`No SARIF files found to upload in "${sarifPath}".`,
);
}

// If there are `.quality.sarif` files in `sarifPath`, then upload those to the code quality service.
const qualityUploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
analyses.CodeQuality,
actionsUtil.fixCodeQualityCategory(logger, category),
);
if (qualityUploadResult !== undefined) {
sarifIds.push({
analysis: analyses.AnalysisKind.CodeQuality,
id: qualityUploadResult.sarifID,
});
const codeScanningResult =
uploadResults[analyses.AnalysisKind.CodeScanning];
if (codeScanningResult !== undefined) {
core.setOutput("sarif-id", codeScanningResult.sarifID);
}
core.setOutput("sarif-ids", JSON.stringify(sarifIds));
core.setOutput("sarif-ids", JSON.stringify(uploadResults));

// We don't upload results in test mode, so don't wait for processing
if (isInTestMode()) {
core.debug("In test mode. Waiting for processing is disabled.");
if (shouldSkipSarifUpload()) {
core.debug(
"SARIF upload disabled by an environment variable. Waiting for processing is disabled.",
);
} else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
if (uploadResult !== undefined) {
if (codeScanningResult !== undefined) {
await upload_lib.waitForProcessing(
getRepositoryNwo(),
uploadResult.sarifID,
codeScanningResult.sarifID,
logger,
);
}
@@ -202,7 +130,7 @@ async function run() {
}
await sendSuccessStatusReport(
startedAt,
uploadResult?.statusReport || {},
codeScanningResult?.statusReport || {},
logger,
);
} catch (unwrappedError) {
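With this rewrite, the sarif-ids output is derived from the map returned by uploadSarif, keyed by analysis kind, rather than from an array of { analysis, id } objects; the action.yml snippet further down documents the output as a map from analysis kind to SARIF ID. A hedged sketch of reading that output from a follow-up script step (the step id and the exact serialized shape are assumptions, not taken from the diff):

// Illustrative only. Assumes the workflow exposes the output via:
//   env:
//     SARIF_IDS: ${{ steps.upload.outputs.sarif-ids }}
const raw = process.env.SARIF_IDS ?? "{}";
const sarifIds: Partial<Record<"code-scanning" | "code-quality", string>> =
  JSON.parse(raw);

if (sarifIds["code-scanning"] !== undefined) {
  console.log(`Code scanning upload ID: ${sarifIds["code-scanning"]}`);
}
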
185  src/upload-sarif.test.ts  Normal file
@@ -0,0 +1,185 @@
import * as fs from "fs";
import * as path from "path";

import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import { AnalysisKind, getAnalysisConfig } from "./analyses";
import { getRunnerLogger } from "./logging";
import { createFeatures, setupTests } from "./testing-utils";
import { UploadResult } from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import * as util from "./util";

setupTests(test);

interface UploadSarifExpectedResult {
uploadResult?: UploadResult;
expectedFiles?: string[];
}

const uploadSarifMacro = test.macro({
exec: async (
t: ExecutionContext<unknown>,
sarifFiles: string[],
sarifPath: (tempDir: string) => string = (tempDir) => tempDir,
expectedResult: Partial<Record<AnalysisKind, UploadSarifExpectedResult>>,
) => {
await util.withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const testPath = sarifPath(tempDir);
const features = createFeatures([]);

const toFullPath = (filename: string) => path.join(tempDir, filename);

const uploadSpecifiedFiles = sinon.stub(
uploadLib,
"uploadSpecifiedFiles",
);

for (const analysisKind of Object.values(AnalysisKind)) {
uploadSpecifiedFiles
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
)
.resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
}

const fullSarifPaths = sarifFiles.map(toFullPath);
for (const sarifFile of fullSarifPaths) {
fs.writeFileSync(sarifFile, "");
}

const actual = await uploadSarif(logger, features, "", testPath);

for (const analysisKind of Object.values(AnalysisKind)) {
const analysisKindResult = expectedResult[analysisKind];
if (analysisKindResult) {
// We are expecting a result for this analysis kind, check that we have it.
t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult);
// Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths
// that we expected it to be called with.
t.assert(
uploadSpecifiedFiles.calledWith(
analysisKindResult.expectedFiles?.map(toFullPath) ??
fullSarifPaths,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
),
);
} else {
// Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined`
// is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this
// analysis kind.
t.is(actual[analysisKind], undefined);
// Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind.
t.assert(
!uploadSpecifiedFiles.calledWith(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
),
`uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
);
}
}
});
},
title: (providedTitle = "") => `uploadSarif - ${providedTitle}`,
});

test(
"SARIF file",
uploadSarifMacro,
["test.sarif"],
(tempDir) => path.join(tempDir, "test.sarif"),
{
"code-scanning": {
uploadResult: {
statusReport: {},
sarifID: "code-scanning-sarif",
},
},
},
);

test(
"JSON file",
uploadSarifMacro,
["test.json"],
(tempDir) => path.join(tempDir, "test.json"),
{
"code-scanning": {
uploadResult: {
statusReport: {},
sarifID: "code-scanning-sarif",
},
},
},
);

test(
"Code Scanning files",
uploadSarifMacro,
["test.json", "test.sarif"],
undefined,
{
"code-scanning": {
uploadResult: {
statusReport: {},
sarifID: "code-scanning-sarif",
},
expectedFiles: ["test.sarif"],
},
},
);

test(
"Code Quality file",
uploadSarifMacro,
["test.quality.sarif"],
(tempDir) => path.join(tempDir, "test.quality.sarif"),
{
"code-quality": {
uploadResult: {
statusReport: {},
sarifID: "code-quality-sarif",
},
},
},
);

test(
"Mixed files",
uploadSarifMacro,
["test.sarif", "test.quality.sarif"],
undefined,
{
"code-scanning": {
uploadResult: {
statusReport: {},
sarifID: "code-scanning-sarif",
},
expectedFiles: ["test.sarif"],
},
"code-quality": {
uploadResult: {
statusReport: {},
sarifID: "code-quality-sarif",
},
expectedFiles: ["test.quality.sarif"],
},
},
);
51  src/upload-sarif.ts  Normal file
@@ -0,0 +1,51 @@
import * as analyses from "./analyses";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import * as upload_lib from "./upload-lib";
import { unsafeEntriesInvariant } from "./util";

// Maps analysis kinds to SARIF IDs.
export type UploadSarifResults = Partial<
Record<analyses.AnalysisKind, upload_lib.UploadResult>
>;

/**
* Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
*
* @param logger The logger to use.
* @param features Information about enabled features.
* @param checkoutPath The path where the repository was checked out at.
* @param sarifPath The path to the file or directory to upload.
* @param category The analysis category.
*
* @returns A partial mapping from analysis kinds to the upload results.
*/
export async function uploadSarif(
logger: Logger,
features: FeatureEnablement,
checkoutPath: string,
sarifPath: string,
category?: string,
): Promise<UploadSarifResults> {
const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
logger,
sarifPath,
);

const uploadResults: UploadSarifResults = {};
for (const [analysisKind, sarifFiles] of unsafeEntriesInvariant(
sarifGroups,
)) {
const analysisConfig = analyses.getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
logger,
analysisConfig,
);
}

return uploadResults;
}
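A minimal sketch of how a caller might drive the new uploadSarif entry point. The function signature is taken from the file above, but the call site itself, including the paths and category, is illustrative rather than part of the diff:

import { AnalysisKind } from "./analyses";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { uploadSarif } from "./upload-sarif";

// Illustrative caller only; how `logger` and `features` are built is out of scope here.
async function runUpload(logger: Logger, features: FeatureEnablement) {
  const results = await uploadSarif(
    logger,
    features,
    "/path/to/checkout",
    "/path/to/sarif-or-directory",
    "my-category", // the category argument is optional
  );

  // Only analysis kinds that actually had SARIF files appear in the result map.
  const codeScanning = results[AnalysisKind.CodeScanning];
  if (codeScanning !== undefined) {
    logger.info(`Uploaded code scanning SARIF with ID ${codeScanning.sarifID}`);
  }
}
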
33  src/util.ts
@@ -764,12 +764,19 @@ export function isGoodVersion(versionSpec: string) {
/**
* Returns whether we are in test mode. This is used by CodeQL Action PR checks.
*
* In test mode, we don't upload SARIF results or status reports to the GitHub API.
* In test mode, we skip several uploads (SARIF results, status reports, DBs, ...).
*/
export function isInTestMode(): boolean {
return process.env[EnvVar.TEST_MODE] === "true";
}

/**
* Returns whether we specifically want to skip uploading SARIF files.
*/
export function shouldSkipSarifUpload(): boolean {
return isInTestMode() || process.env[EnvVar.SKIP_SARIF_UPLOAD] === "true";
}

/**
* Get the testing environment.
*
@@ -1287,3 +1294,27 @@ export async function asyncSome<T>(
export function isDefined<T>(value: T | null | undefined): value is T {
return value !== undefined && value !== null;
}

/** Like `Object.keys`, but typed so that the elements of the resulting array have the
* same type as the keys of the input object. Note that this may not be sound if the input
* object has been cast to `T` from a subtype of `T` and contains additional keys that
* are not represented by `keyof T`.
*/
export function unsafeKeysInvariant<T extends Record<string, any>>(
object: T,
): Array<keyof T> {
return Object.keys(object) as Array<keyof T>;
}

/** Like `Object.entries`, but typed so that the key elements of the result have the
* same type as the keys of the input object. Note that this may not be sound if the input
* object has been cast to `T` from a subtype of `T` and contains additional keys that
* are not represented by `keyof T`.
*/
export function unsafeEntriesInvariant<T extends Record<string, any>>(
object: T,
): Array<[keyof T, Exclude<T[keyof T], undefined>]> {
return Object.entries(object).filter(
([_, val]) => val !== undefined,
) as Array<[keyof T, Exclude<T[keyof T], undefined>]>;
}
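The two helpers above exist so that iterating a Partial<Record<AnalysisKind, ...>> keeps the key type instead of degrading to string, and unsafeEntriesInvariant additionally filters out entries whose value is undefined so the tuple's value type can exclude undefined. A small illustrative use with simplified types:

import { unsafeEntriesInvariant } from "./util";

type Kind = "code-scanning" | "code-quality";

const groups: Partial<Record<Kind, string[]>> = {
  "code-scanning": ["a.sarif"],
  "code-quality": undefined, // explicitly-undefined entries are filtered out
};

for (const [kind, files] of unsafeEntriesInvariant(groups)) {
  // `kind` is typed as Kind and `files` as string[], never undefined.
  console.log(kind, files.length);
}
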
@@ -395,9 +395,9 @@ async function testLanguageAliases(
},
},
steps: [
{ uses: "actions/checkout@v3" },
{ uses: "github/codeql-action/init@v3" },
{ uses: "github/codeql-action/analyze@v3" },
{ uses: "actions/checkout@v4" },
{ uses: "github/codeql-action/init@v4" },
{ uses: "github/codeql-action/analyze@v4" },
],
},
},
@@ -666,7 +666,7 @@ test("getWorkflowErrors() should report a warning if different versions of the C
analyze:
steps:
- uses: github/codeql-action/init@v2
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/analyze@v4
`) as Workflow,
await getCodeQLForTesting(),
);
@@ -686,8 +686,8 @@ test("getWorkflowErrors() should not report a warning if the same versions of th
jobs:
analyze:
steps:
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/init@v4
- uses: github/codeql-action/analyze@v4
`) as Workflow,
await getCodeQLForTesting(),
);
@@ -706,7 +706,7 @@ test("getWorkflowErrors() should not report a warning involving versions of othe
analyze:
steps:
- uses: actions/checkout@v5
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/init@v4
`) as Workflow,
await getCodeQLForTesting(),
);
@@ -723,9 +723,9 @@ test("getCategoryInputOrThrow returns category for simple workflow with category
analysis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/analyze@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- uses: github/codeql-action/analyze@v4
with:
category: some-category
`) as Workflow,
@@ -745,9 +745,9 @@ test("getCategoryInputOrThrow returns undefined for simple workflow without cate
analysis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/analyze@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- uses: github/codeql-action/analyze@v4
`) as Workflow,
"analysis",
{},
@@ -765,19 +765,19 @@ test("getCategoryInputOrThrow returns category for workflow with multiple jobs",
foo:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- runs: ./build foo
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/analyze@v4
with:
category: foo-category
bar:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- runs: ./build bar
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/analyze@v4
with:
category: bar-category
`) as Workflow,
@@ -800,11 +800,11 @@ test("getCategoryInputOrThrow finds category for workflow with language matrix",
matrix:
language: [javascript, python]
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
with:
language: \${{ matrix.language }}
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/analyze@v4
with:
category: "/language:\${{ matrix.language }}"
`) as Workflow,
@@ -824,9 +824,9 @@ test("getCategoryInputOrThrow throws error for workflow with dynamic category",
jobs:
analysis:
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/analyze@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- uses: github/codeql-action/analyze@v4
with:
category: "\${{ github.workflow }}"
`) as Workflow,
@@ -851,12 +851,12 @@ test("getCategoryInputOrThrow throws error for workflow with multiple calls to a
analysis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: github/codeql-action/init@v3
- uses: github/codeql-action/analyze@v3
- uses: actions/checkout@v4
- uses: github/codeql-action/init@v4
- uses: github/codeql-action/analyze@v4
with:
category: some-category
- uses: github/codeql-action/analyze@v3
- uses: github/codeql-action/analyze@v4
with:
category: another-category
`) as Workflow,
@@ -16,6 +16,9 @@ inputs:
language:
description: The programming language to setup the proxy for the correct ecosystem
required: false
matrix:
default: ${{ toJson(matrix) }}
required: false
outputs:
proxy_host:
description: The IP address of the proxy
@@ -26,6 +29,6 @@ outputs:
proxy_urls:
description: A stringified JSON array of objects containing the types and URLs of the configured registries.
runs:
using: node20
using: node24
main: "../lib/start-proxy-action.js"
post: "../lib/start-proxy-action-post.js"
@@ -41,6 +41,6 @@ outputs:

{ "code-scanning": "some-id", "code-quality": "some-other-id" }
runs:
using: node20
using: node24
main: '../lib/upload-sarif-action.js'
post: '../lib/upload-sarif-action-post.js'