Mirror of https://github.com/github/codeql-action.git (synced 2025-12-25 08:40:12 +08:00)
Compare commits: v4.30.7...esbena/deb (206 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 28e47ba26d |  |
|  | ae78991f55 |  |
|  | dd565f3332 |  |
|  | fa46f22b12 |  |
|  | 4e94bd11f7 |  |
|  | 8f11182164 |  |
|  | 1d36546c14 |  |
|  | 08ada26e6a |  |
|  | b843cbeed0 |  |
|  | 1ecd563919 |  |
|  | e576807920 |  |
|  | ad35676669 |  |
|  | d75645b13f |  |
|  | 710606cc35 |  |
|  | f0452d5366 |  |
|  | 956c56734d |  |
|  | b9cd36824e |  |
|  | 22d29ca74d |  |
|  | 9625890712 |  |
|  | 690d276755 |  |
|  | 1c3c8066c3 |  |
|  | da64a41e37 |  |
|  | 8376af204a |  |
|  | f48b54af10 |  |
|  | 40b4cdd21f |  |
|  | e849c567ec |  |
|  | d1b51f05c9 |  |
|  | aed27f7231 |  |
|  | 8ff870a6c2 |  |
|  | 6f0fcbeea7 |  |
|  | 89d3359017 |  |
|  | d79c0a1339 |  |
|  | 5e37670026 |  |
|  | def04c1c0e |  |
|  | 12f3cfef09 |  |
|  | c2bec36917 |  |
|  | 14139c9f77 |  |
|  | 596de7f1bc |  |
|  | 899bf2fd1e |  |
|  | 6fbdd5f4e9 |  |
|  | 489ed914f1 |  |
|  | 42642085de |  |
|  | 4bd7dfe989 |  |
|  | ebd514f490 |  |
|  | e5f165b8f5 |  |
|  | c98d5a9a4f |  |
|  | b7c814cb39 |  |
|  | f88cb01694 |  |
|  | 3cd3374657 |  |
|  | 3934593862 |  |
|  | bab3f2b5f5 |  |
|  | 9924f476ba |  |
|  | bd5f49c7ca |  |
|  | 02b2c3aafc |  |
|  | aa048acb05 |  |
|  | 0c5185d061 |  |
|  | 79ed9569a3 |  |
|  | 8e53c48f94 |  |
|  | 804fc665f9 |  |
|  | e6e649a8f3 |  |
|  | 40e26468f3 |  |
|  | 9b0ac1cc3b |  |
|  | ffed63adb8 |  |
|  | bee06ec042 |  |
|  | 06f31ec789 |  |
|  | 53588c5ad2 |  |
|  | 2357c43cad |  |
|  | a3ff966dbf |  |
|  | 6562050a4e |  |
|  | e9daf5bcd9 |  |
|  | c13672ee32 |  |
|  | f2f52d0d47 |  |
|  | 08e53bec85 |  |
|  | 519594fe94 |  |
|  | 8c324fe288 |  |
|  | a6b9514fab |  |
|  | c64c4070cc |  |
|  | d88a5540c3 |  |
|  | aa0f6ea898 |  |
|  | b03dcd5d9d |  |
|  | 16140ae1a1 |  |
|  | 30db5fee08 |  |
|  | 9ce56a247f |  |
|  | 2c8f4891d1 |  |
|  | d7a8ae5fdd |  |
|  | 0822fb12e7 |  |
|  | 913cd47984 |  |
|  | 4f14649ced |  |
|  | ac922ab562 |  |
|  | 66df0bc515 |  |
|  | 70205d3d12 |  |
|  | 697c209bfc |  |
|  | 1bd53ba38c |  |
|  | cac4df0c79 |  |
|  | 77e5c0d0a2 |  |
|  | 97a4f751be |  |
|  | 2d5512b361 |  |
|  | fa7bdf0559 |  |
|  | 57c7b0a884 |  |
|  | 4874f90a8d |  |
|  | 5a9e92afca |  |
|  | 9bd9b03572 |  |
|  | 3569065d7e |  |
|  | c0e8887d5a |  |
|  | 3c8d00aea0 |  |
|  | bc93b04b0c |  |
|  | adf39dd33f |  |
|  | 000295122d |  |
|  | 2611d033d7 |  |
|  | ee753b4724 |  |
|  | db6938a4d0 |  |
|  | d02f50ee62 |  |
|  | f4237b7e76 |  |
|  | 302fc5e00d |  |
|  | c77b3fb96e |  |
|  | 2a54ab5016 |  |
|  | 2ade8a09a3 |  |
|  | a60e5ce8ec |  |
|  | 8d0251c1f7 |  |
|  | 80220dcd46 |  |
|  | e72fd9acb1 |  |
|  | 17783bfb99 |  |
|  | 3c764cd93a |  |
|  | e1968324ff |  |
|  | 2a6736cca7 |  |
|  | c8765c966b |  |
|  | 61789e2fdb |  |
|  | 5cd2d139cb |  |
|  | f443b600d9 |  |
|  | 7a2cb623ed |  |
|  | 527f0f324a |  |
|  | f402506f0f |  |
|  | f5e53f9476 |  |
|  | 4e90a42a3e |  |
|  | 413a4a4df1 |  |
|  | 452186448a |  |
|  | eadf14bf6e |  |
|  | e1257b6fda |  |
|  | b516b1d4bc |  |
|  | 168b2dee16 |  |
|  | 4704ab1869 |  |
|  | dc2ced8385 |  |
|  | 5c752c85dd |  |
|  | e74435a1da |  |
|  | 524b9a00e8 |  |
|  | a512fe0868 |  |
|  | 62f0f21c3c |  |
|  | a8440d08d5 |  |
|  | 610c7c68e3 |  |
|  | ff2fc66cc1 |  |
|  | a841c540b7 |  |
|  | aeb12f6eaa |  |
|  | 6fd4ceb7bb |  |
|  | 196a3e577b |  |
|  | 98abb870dc |  |
|  | bdd2cdf891 |  |
|  | fb148789ab |  |
|  | 2ff418f28a |  |
|  | 527501d15d |  |
|  | 621809b239 |  |
|  | 8301b8b096 |  |
|  | 7bdfa9736a |  |
|  | a57997f2d2 |  |
|  | 4489a63a9d |  |
|  | 1707898e5b |  |
|  | d05f2255a0 |  |
|  | 7892cb2362 |  |
|  | 8a6b62bc2d |  |
|  | d95a3b53f8 |  |
|  | 257e42ce3d |  |
|  | 074940162c |  |
|  | df65651d4f |  |
|  | 1b09eb4ccc |  |
|  | 2f11c17b09 |  |
|  | 0ba4970165 |  |
|  | 5431b6a308 |  |
|  | 7f5db167b6 |  |
|  | 239d7b286f |  |
|  | 86b2ad6646 |  |
|  | 5dfb610e99 |  |
|  | 1491baa17e |  |
|  | db562a696f |  |
|  | 6877465dc1 |  |
|  | ff23a55f4d |  |
|  | 00a6e13cbf |  |
|  | 25c8db918a |  |
|  | dabf6fc578 |  |
|  | 14c5d77032 |  |
|  | 380e002752 |  |
|  | 680b07003d |  |
|  | 22aba57acf |  |
|  | 11e4034414 |  |
|  | 882667e383 |  |
|  | 6f964b7776 |  |
|  | 6bdf5d3d00 |  |
|  | 9b3ade946d |  |
|  | e0b9da7b0a |  |
|  | 726a341ed4 |  |
|  | 1cc5eb6636 |  |
|  | 43ce7ef399 |  |
|  | 4d0c164f60 |  |
|  | dd9e24a8a4 |  |
|  | 13a3a6890f |  |
|  | 7d468c931c |  |
|  | 425ef85595 |  |
|  | 297313df79 |  |
.github/actions/prepare-test/action.yml (vendored, 5 changes)
@@ -2,7 +2,7 @@ name: "Prepare test"
description: Performs some preparation to run tests
inputs:
version:
description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'nightly', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
description: "The version of the CodeQL CLI to use. Can be 'linked', 'default', 'toolcache', 'nightly', 'nightly-latest', 'nightly-YYYYMMDD', or 'stable-vX.Y.Z"
required: true
use-all-platform-bundle:
description: "If true, we output a tools URL with codeql-bundle.tar.gz file rather than platform-specific URL"
@@ -41,6 +41,9 @@ runs:
elif [[ "$VERSION" == "linked" ]]; then
echo "tools-url=linked" >> "$GITHUB_OUTPUT"
exit 0
elif [[ "$VERSION" == "toolcache" ]]; then
echo "tools-url=toolcache" >> "$GITHUB_OUTPUT"
exit 0
elif [[ "$VERSION" == "default" ]]; then
echo "tools-url=" >> "$GITHUB_OUTPUT"
exit 0
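The change above adds a 'toolcache' option to the `version` input of the prepare-test composite action. For reference, this is how the PR-check files elsewhere in this diff consume that input and the resulting `tools-url` output (a sketch assembled from those files, not an additional change in this PR):

```yaml
# Sketch based on the generated PR-check workflows and the bundle-from-toolcache check in this diff.
- name: Prepare test
  id: prepare-test
  uses: ./.github/actions/prepare-test
  with:
    version: toolcache          # new option; the action now emits tools-url=toolcache
    use-all-platform-bundle: 'false'
    setup-kotlin: 'true'
- id: setup-codeql
  uses: ./../action/setup-codeql
  with:
    tools: ${{ steps.prepare-test.outputs.tools-url }}
```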
.github/dependabot.yml (vendored, 10 changes)
@@ -16,9 +16,12 @@ updates:
- dependency-name: "eslint-plugin-import"
versions: [">=2.30.0"]
groups:
npm:
npm-minor:
patterns:
- "*"
update-types:
- "minor"
- "patch"
- package-ecosystem: github-actions
directories:
- "/.github/workflows"
@@ -28,6 +31,9 @@ updates:
labels:
- Rebuild
groups:
actions:
actions-minor:
patterns:
- "*"
update-types:
- "minor"
- "patch"
.github/pull_request_template.md (vendored, 49 changes)
@@ -1,4 +1,13 @@
<!-- For GitHub staff: Remember that this is a public repository. -->
<!--
For GitHub staff: Remember that this is a public repository. Do not link to internal resources.
If necessary, link to this PR from an internal issue and include further details there.

Everyone: Include a summary of the context of this change, what it aims to accomplish, and why you
chose the approach you did if applicable. Indicate any open questions you want to answer
during the review process and anything you want reviewers to pay particular attention to.

See https://github.com/github/codeql-action/blob/main/CONTRIBUTING.md for additional information.
-->

### Risk assessment

@@ -7,6 +16,44 @@ For internal use only. Please select the risk level of this change:
- **Low risk:** Changes are fully under feature flags, or have been fully tested and validated in pre-production environments and are highly observable, or are documentation or test only.
- **High risk:** Changes are not fully under feature flags, have limited visibility and/or cannot be tested outside of production.

#### Which use cases does this change impact?

<!-- Delete options that don't apply. -->

- **Advanced setup** - Impacts users who have custom workflows.
- **Default setup** - Impacts users who use default setup.
- **Code Scanning** - Impacts Code Scanning (i.e. `analysis-kinds: code-scanning`).
- **Code Quality** - Impacts Code Quality (i.e. `analysis-kinds: code-quality`).
- **Third-party analyses** - Impacts third-party analyses (i.e. `upload-sarif`).
- **GHES** - Impacts GitHub Enterprise Server.

#### How did/will you validate this change?

<!-- Delete options that don't apply. -->

- **Test repository** - This change will be tested on a test repository before merging.
- **Unit tests** - I am depending on unit test coverage (i.e. tests in `.test.ts` files).
- **End-to-end tests** - I am depending on PR checks (i.e. tests in `pr-checks`).
- **Other** - Please provide details.
- **None** - I am not validating these changes.

#### If something goes wrong after this change is released, what are the mitigation and rollback strategies?

<!-- Delete strategies that don't apply. -->

- **Feature flags** - All new or changed code paths can be fully disabled with corresponding feature flags.
- **Rollback** - Change can only be disabled by rolling back the release or releasing a new version with a fix.
- **Other** - Please provide details.

#### How will you know if something goes wrong after this change is released?

<!-- Delete options that don't apply. -->

- **Telemetry** - I rely on existing telemetry or have made changes to the telemetry.
- **Dashboards** - I will watch relevant dashboards for issues after the release. Consider whether this requires this change to be released at a particular time rather than as part of a regular release.
- **Alerts** - New or existing monitors will trip if something goes wrong with this change.
- **Other** - Please provide details.

### Merge / deployment checklist

- Confirm this change is backwards compatible with existing workflows.
.github/sizeup.yml (vendored, new file, 55 lines)
@@ -0,0 +1,55 @@
labeling:
applyCategoryLabels: true
categoryLabelPrefix: "size/"

commenting:
addCommentWhenScoreThresholdHasBeenExceeded: false

sizeup:
categories:
- name: extra small
lte: 25
label:
name: XS
description: Should be very easy to review
color: 3cbf00
- name: small
lte: 100
label:
name: S
description: Should be easy to review
color: 5d9801
- name: medium
lte: 250
label:
name: M
description: Should be of average difficulty to review
color: 7f7203
- name: large
lte: 500
label:
name: L
description: May be hard to review
color: a14c05
- name: extra large
lte: 1000
label:
name: XL
description: May be very hard to review
color: c32607
- name: extra extra large
label:
name: XXL
description: May be extremely hard to review
color: e50009
ignoredFilePatterns:
- ".github/workflows/__*"
- "lib/**/*"
- "package-lock.json"
testFilePatterns:
- "**/*.test.ts"
scoring:
# This formula and the aliases below it are written in prefix notation.
# For an explanation of how this works, please see:
# https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
formula: "- - + additions deletions comments whitespace"
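The `formula` above is written in prefix notation, so `- - + additions deletions comments whitespace` evaluates as `((additions + deletions) - comments) - whitespace`. A worked example with made-up figures (only the field names and thresholds come from the config above):

```yaml
# Hypothetical PR: 120 additions and 30 deletions, of which 10 lines are
# comments and 5 are whitespace-only.
#   score = ((additions + deletions) - comments) - whitespace
#         = ((120 + 30) - 10) - 5
#         = 135
# 135 is above the "small" threshold (lte: 100) and within the "medium"
# threshold (lte: 250), so the PR would receive the size/M label.
```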
.github/update-release-branch.py (vendored, 8 changes)
@@ -371,10 +371,10 @@ def main():
# releases.
run_git('revert', vOlder_update_commits[0], '--no-edit')

# Also revert the "Update checked-in dependencies" commit created by Actions.
update_dependencies_commit = run_git('log', '--grep', '^Update checked-in dependencies', '--format=%H').split()[0]
print(f' Reverting {update_dependencies_commit}')
run_git('revert', update_dependencies_commit, '--no-edit')
# Also revert the "Rebuild" commit created by Actions.
rebuild_commit = run_git('log', '--grep', '^Rebuild$', '--format=%H').split()[0]
print(f' Reverting {rebuild_commit}')
run_git('revert', rebuild_commit, '--no-edit')

else:
print(' Nothing to revert.')
15
.github/workflows/__analyze-ref-input.yml
generated
vendored
15
.github/workflows/__analyze-ref-input.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -70,6 +80,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# pr-checks/sync.sh
|
||||
# to regenerate this file.
|
||||
|
||||
name: 'PR Check - Upload-sarif: code quality endpoint'
|
||||
name: 'PR Check - Bundle: From toolcache'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
@@ -21,19 +21,9 @@ on:
|
||||
schedule:
|
||||
- cron: '0 5 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
go-version:
|
||||
type: string
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
inputs: {}
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
type: string
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
inputs: {}
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -41,14 +31,14 @@ concurrency:
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
jobs:
|
||||
upload-quality-sarif:
|
||||
bundle-from-toolcache:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
name: 'Upload-sarif: code quality endpoint'
|
||||
version: toolcache
|
||||
name: 'Bundle: From toolcache'
|
||||
if: github.triggering_actor != 'dependabot[bot]'
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -65,31 +55,31 @@ jobs:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v6
|
||||
- name: Install @actions/tool-cache
|
||||
run: npm install @actions/tool-cache
|
||||
- name: Check toolcache contains CodeQL
|
||||
continue-on-error: true
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- uses: ./../action/init
|
||||
script: |
|
||||
const toolcache = require('@actions/tool-cache');
|
||||
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
|
||||
if (allCodeqlVersions.length === 0) {
|
||||
throw new Error(`CodeQL could not be found in the toolcache`);
|
||||
}
|
||||
- id: setup-codeql
|
||||
uses: ./../action/setup-codeql
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: csharp,java,javascript,python
|
||||
analysis-kinds: code-quality
|
||||
- name: Build code
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
- name: Check CodeQL is installed within the toolcache
|
||||
uses: actions/github-script@v8
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: never
|
||||
- uses: ./../action/upload-sarif
|
||||
id: upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
- name: Check output from `upload-sarif` step
|
||||
if: '!(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)'
|
||||
run: exit 1
|
||||
script: |
|
||||
const toolcache = require('@actions/tool-cache');
|
||||
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
|
||||
console.log(`Found CodeQL versions: ${allCodeqlVersions}`);
|
||||
if (allCodeqlVersions.length === 0) {
|
||||
throw new Error('CodeQL not found in toolcache');
|
||||
}
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
2
.github/workflows/__config-input.yml
generated
vendored
2
.github/workflows/__config-input.yml
generated
vendored
@@ -49,7 +49,7 @@ jobs:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: npm
|
||||
|
||||
15
.github/workflows/__local-bundle.yml
generated
vendored
15
.github/workflows/__local-bundle.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -70,6 +80,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- name: Fetch latest CodeQL bundle
|
||||
run: |
|
||||
wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
|
||||
|
||||
15
.github/workflows/__multi-language-autodetect.yml
generated
vendored
15
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -104,6 +114,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- name: Use Xcode 16
|
||||
if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
|
||||
run: sudo xcode-select -s "/Applications/Xcode_16.app"
|
||||
|
||||
17
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
17
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -63,7 +73,7 @@ jobs:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: npm
|
||||
@@ -81,6 +91,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
config-file: .github/codeql/codeql-config-packaging3.yml
|
||||
|
||||
2
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
2
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -63,7 +63,7 @@ jobs:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: npm
|
||||
|
||||
2
.github/workflows/__packaging-config-js.yml
generated
vendored
2
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -63,7 +63,7 @@ jobs:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: npm
|
||||
|
||||
2
.github/workflows/__packaging-inputs-js.yml
generated
vendored
2
.github/workflows/__packaging-inputs-js.yml
generated
vendored
@@ -63,7 +63,7 @@ jobs:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Install Node.js
|
||||
uses: actions/setup-node@v5
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20.x
|
||||
cache: npm
|
||||
|
||||
9
.github/workflows/__quality-queries.yml
generated
vendored
9
.github/workflows/__quality-queries.yml
generated
vendored
@@ -80,6 +80,7 @@ jobs:
|
||||
with:
|
||||
output: ${{ runner.temp }}/results
|
||||
upload-database: false
|
||||
post-processed-sarif-path: ${{ runner.temp }}/post-processed
|
||||
- name: Upload security SARIF
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
uses: actions/upload-artifact@v4
|
||||
@@ -96,6 +97,14 @@ jobs:
|
||||
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
|
||||
path: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
retention-days: 7
|
||||
- name: Upload post-processed SARIF
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: |
|
||||
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
|
||||
path: ${{ runner.temp }}/post-processed
|
||||
retention-days: 7
|
||||
if-no-files-found: error
|
||||
- name: Check quality query does not appear in security SARIF
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
uses: actions/github-script@v8
|
||||
|
||||
15
.github/workflows/__remote-config.yml
generated
vendored
15
.github/workflows/__remote-config.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -72,6 +82,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
2
.github/workflows/__rubocop-multi-language.yml
generated
vendored
@@ -56,7 +56,7 @@ jobs:
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@0481980f17b760ef6bca5e8c55809102a0af1e5a # v1.263.0
|
||||
uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
|
||||
with:
|
||||
ruby-version: 2.6
|
||||
- name: Install Code Scanning integration
|
||||
|
||||
15
.github/workflows/__unset-environment.yml
generated
vendored
15
.github/workflows/__unset-environment.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -72,6 +82,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
id: init
|
||||
with:
|
||||
|
||||
15
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
15
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -70,6 +80,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
|
||||
173
.github/workflows/__upload-sarif.yml
generated
vendored
Normal file
173
.github/workflows/__upload-sarif.yml
generated
vendored
Normal file
@@ -0,0 +1,173 @@
|
||||
# Warning: This file is generated automatically, and should not be modified.
|
||||
# Instead, please modify the template in the pr-checks directory and run:
|
||||
# pr-checks/sync.sh
|
||||
# to regenerate this file.
|
||||
|
||||
name: PR Check - Test different uses of `upload-sarif`
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GO111MODULE: auto
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- releases/v*
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
- ready_for_review
|
||||
schedule:
|
||||
- cron: '0 5 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
go-version:
|
||||
type: string
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
type: string
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
concurrency:
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
jobs:
|
||||
upload-sarif:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
analysis-kinds: code-scanning
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
analysis-kinds: code-quality
|
||||
- os: ubuntu-latest
|
||||
version: default
|
||||
analysis-kinds: code-scanning,code-quality
|
||||
name: Test different uses of `upload-sarif`
|
||||
if: github.triggering_actor != 'dependabot[bot]'
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: read
|
||||
timeout-minutes: 45
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v5
|
||||
- name: Prepare test
|
||||
id: prepare-test
|
||||
uses: ./.github/actions/prepare-test
|
||||
with:
|
||||
version: ${{ matrix.version }}
|
||||
use-all-platform-bundle: 'false'
|
||||
setup-kotlin: 'true'
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v6
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: csharp,java,javascript,python
|
||||
analysis-kinds: ${{ matrix.analysis-kinds }}
|
||||
- name: Build code
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
upload: never
|
||||
output: ${{ runner.temp }}/results
|
||||
|
||||
- name: |
|
||||
Upload all SARIF files for `analysis-kinds: ${{ matrix.analysis-kinds }}`
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-sarif
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
sarif_file: ${{ runner.temp }}/results
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
|
||||
- name: Fail for missing output from `upload-sarif` step for `code-scanning`
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
|
||||
run: exit 1
|
||||
- name: Fail for missing output from `upload-sarif` step for `code-quality`
|
||||
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
|
||||
run: exit 1
|
||||
|
||||
- name: Upload single SARIF file for Code Scanning
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-scanning
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
|
||||
- name: Fail for missing output from `upload-single-sarif-code-scanning` step
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning') &&
|
||||
!(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
|
||||
run: exit 1
|
||||
- name: Upload single SARIF file for Code Quality
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-quality
|
||||
if: contains(matrix.analysis-kinds, 'code-quality')
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
|
||||
- name: Fail for missing output from `upload-single-sarif-code-quality` step
|
||||
if: contains(matrix.analysis-kinds, 'code-quality') &&
|
||||
!(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
|
||||
run: exit 1
|
||||
|
||||
- name: Change SARIF file extension
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
|
||||
- name: Upload single non-`.sarif` file
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-non-sarif
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning')
|
||||
with:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
|
||||
- name: Fail for missing output from `upload-single-non-sarif` step
|
||||
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
|
||||
run: exit 1
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
28
.github/workflows/__with-checkout-path.yml
generated
vendored
28
.github/workflows/__with-checkout-path.yml
generated
vendored
@@ -27,6 +27,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
workflow_call:
|
||||
inputs:
|
||||
go-version:
|
||||
@@ -34,6 +39,11 @@ on:
|
||||
description: The version of Go to install
|
||||
required: false
|
||||
default: '>=1.21.0'
|
||||
python-version:
|
||||
type: string
|
||||
description: The version of Python to install
|
||||
required: false
|
||||
default: '3.13'
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -70,6 +80,11 @@ jobs:
|
||||
with:
|
||||
go-version: ${{ inputs.go-version || '>=1.21.0' }}
|
||||
cache: false
|
||||
- name: Install Python
|
||||
if: matrix.version != 'nightly-latest'
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: ${{ inputs.python-version || '3.13' }}
|
||||
- name: Delete original checkout
|
||||
run: |
|
||||
# delete the original checkout so we don't accidentally use it.
|
||||
@@ -103,29 +118,30 @@ jobs:
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
run: |
|
||||
PAYLOAD_FILE="$RUNNER_TEMP/payload-code-scanning.json"
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
ACTUAL_COMMIT_OID="$(cat "$PAYLOAD_FILE" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$PAYLOAD_FILE" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$PAYLOAD_FILE" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
env:
|
||||
|
||||
@@ -56,7 +56,7 @@ jobs:
uses: actions/checkout@v5

- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: 'npm'
.github/workflows/label-pr-size.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: Label PR with size

on:
pull_request:
types:
- opened
- synchronize
- reopened
- edited
- ready_for_review

permissions:
contents: read
pull-requests: write

jobs:
sizeup:
name: Label PR with size
runs-on: ubuntu-latest

steps:
- name: Run sizeup
uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
with:
token: "${{ secrets.GITHUB_TOKEN }}"
configuration-file-path: ".github/sizeup.yml"
.github/workflows/post-release-mergeback.yml (vendored, 3 changes)
@@ -47,7 +47,7 @@ jobs:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # ensure we have all tags and can push commits
- uses: actions/setup-node@v5
- uses: actions/setup-node@v6

- name: Update git config
run: |
@@ -146,6 +146,7 @@ jobs:
private-key: ${{ secrets.AUTOMATION_PRIVATE_KEY }}

- name: Create the GitHub release
if: steps.check.outputs.exists != 'true'
env:
PARTIAL_CHANGELOG: "${{ runner.temp }}/partial_changelog.md"
VERSION: "${{ steps.getVersion.outputs.version }}"
.github/workflows/pr-checks.yml (vendored, 4 changes)
@@ -35,7 +35,7 @@ jobs:
- uses: actions/checkout@v5

- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
@@ -73,7 +73,7 @@ jobs:
run: npm run lint-ci

- name: Upload sarif
uses: github/codeql-action/upload-sarif@v3
uses: github/codeql-action/upload-sarif@v4
if: matrix.os == 'ubuntu-latest' && matrix.node-version == 24
with:
sarif_file: eslint.sarif
.github/workflows/query-filters.yml (vendored, 2 changes)
@@ -32,7 +32,7 @@ jobs:
uses: actions/checkout@v5

- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: npm
.github/workflows/update-bundle.yml (vendored, 2 changes)
@@ -41,7 +41,7 @@ jobs:
git config --global user.name "github-actions[bot]"

- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: 'npm'
CHANGELOG.md (18 changes)
@@ -2,6 +2,24 @@

See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## [UNRELEASED]

No user facing changes.

## 4.31.0 - 24 Oct 2025

- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)

## 4.30.9 - 17 Oct 2025

- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
- Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)

## 4.30.8 - 10 Oct 2025

No user facing changes.

## 4.30.7 - 06 Oct 2025

- [v4+ only] The CodeQL Action now runs on Node.js v24. [#3169](https://github.com/github/codeql-action/pull/3169)
@@ -34,6 +34,7 @@ Actions with special purposes and unlikely to be used directly:
- `autobuild`: Attempts to automatically build the code. Only used for analyzing languages that require a build. Use the `build-mode: autobuild` input in the `init` action instead. For information about input parameters, see the [autobuild action definition](https://github.com/github/codeql-action/blob/main/autobuild/action.yml).
- `resolve-environment`: [Experimental] Attempts to infer a build environment suitable for automatic builds. For information about input parameters, see the [resolve-environment action definition](https://github.com/github/codeql-action/blob/main/resolve-environment/action.yml).
- `start-proxy`: [Experimental] Start the HTTP proxy server. Internal use only and will change without notice. For information about input parameters, see the [start-proxy action definition](https://github.com/github/codeql-action/blob/main/start-proxy/action.yml).
- `setup-codeql`: [Experimental] Similar to `init`, except it only installs the CodeQL CLI and does not initialize a database.

### Workflow Permissions
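For context on the experimental `setup-codeql` action mentioned above and in the changelog, here is a minimal usage sketch. It assumes the published action follows the existing `github/codeql-action/<name>@v4` naming and accepts the optional `tools` input that the PR checks in this diff pass to `./../action/setup-codeql`; the action is experimental, so these details may change.

```yaml
# Hypothetical workflow step: install the CodeQL CLI into the runner's
# toolcache without initializing a database.
steps:
  - name: Set up the CodeQL CLI
    uses: github/codeql-action/setup-codeql@v4
    # Assumption: omitting `tools` falls back to the bundle version pinned in defaults.json.
    # with:
    #   tools: https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
```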
@@ -6,7 +6,7 @@ inputs:
description: The name of the check run to add text to.
required: false
output:
description: The path of the directory in which to save the SARIF results
description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
required: false
default: "../results"
upload:
@@ -70,6 +70,12 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
post-processed-sarif-path:
description: >-
Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
argument for this input, they are written to the specified directory.
required: false
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
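The new `post-processed-sarif-path` input described above is exercised by the quality-queries check later in this diff; the sketch below shows the same pattern in a standalone workflow (the artifact name is illustrative):

```yaml
# Persist the post-processed SARIF that analyze would otherwise only keep in memory,
# then attach it to the workflow run as an artifact.
- uses: github/codeql-action/analyze@v4
  with:
    output: ${{ runner.temp }}/results
    post-processed-sarif-path: ${{ runner.temp }}/post-processed
- name: Upload post-processed SARIF
  uses: actions/upload-artifact@v4
  with:
    name: post-processed-sarif
    path: ${{ runner.temp }}/post-processed
    if-no-files-found: error
```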
@@ -131,6 +131,7 @@ export default [
"no-sequences": "error",
"no-shadow": "off",
"@typescript-eslint/no-shadow": "error",
"@typescript-eslint/prefer-optional-chain": "error",
"one-var": ["error", "never"],
},
},
lib/analyze-action-post.js (generated, 1481 changes): file diff suppressed because it is too large.
lib/analyze-action.js (generated, 3247 changes): file diff suppressed because it is too large.
lib/autobuild-action.js (generated, 1516 changes): file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.23.2",
"cliVersion": "2.23.2",
"priorBundleVersion": "codeql-bundle-v2.23.1",
"priorCliVersion": "2.23.1"
"bundleVersion": "codeql-bundle-v2.23.3",
"cliVersion": "2.23.3",
"priorBundleVersion": "codeql-bundle-v2.23.2",
"priorCliVersion": "2.23.2"
}
lib/init-action-post.js (generated, 3083 changes): file diff suppressed because it is too large.
lib/init-action.js (generated, 1987 changes): file diff suppressed because it is too large.
lib/resolve-environment-action.js (generated, 1505 changes): file diff suppressed because it is too large.
lib/setup-codeql-action.js (generated, new file, 88986 lines): file diff suppressed because one or more lines are too long.
lib/start-proxy-action-post.js (generated, 1475 changes): file diff suppressed because it is too large.
lib/start-proxy-action.js (generated, 1445 changes): file diff suppressed because it is too large.
lib/upload-lib.js (generated, 471 changes): file diff suppressed because it is too large.
lib/upload-sarif-action-post.js (generated, 1441 changes): file diff suppressed because it is too large.
lib/upload-sarif-action.js (generated, 482 changes): file diff suppressed because it is too large.
package-lock.json (generated, 1002 changes): file diff suppressed because it is too large.
package.json (15 changes)
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.30.7",
"version": "4.31.1",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -35,6 +35,7 @@
"@actions/io": "^1.1.3",
"@actions/tool-cache": "^2.0.2",
"@octokit/plugin-retry": "^6.0.0",
"@octokit/request-error": "^7.0.1",
"@schemastore/package": "0.0.10",
"archiver": "^7.0.1",
"check-disk-space": "^3.4.0",
@@ -47,15 +48,15 @@
"jsonschema": "1.4.1",
"long": "^5.3.2",
"node-forge": "^1.3.1",
"octokit": "^5.0.3",
"semver": "^7.7.2",
"octokit": "^5.0.4",
"semver": "^7.7.3",
"uuid": "^13.0.0"
},
"devDependencies": {
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.4.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.36.0",
"@eslint/js": "^9.38.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
@@ -66,10 +67,10 @@
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.44.1",
"@typescript-eslint/eslint-plugin": "^8.46.1",
"@typescript-eslint/parser": "^8.41.0",
"ava": "^6.4.1",
"esbuild": "^0.25.10",
"esbuild": "^0.25.11",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -79,7 +80,7 @@
"glob": "^11.0.3",
"nock": "^14.0.10",
"sinon": "^21.0.0",
"typescript": "^5.9.2"
"typescript": "^5.9.3"
},
"overrides": {
"@actions/tool-cache": {
@@ -2,6 +2,7 @@ name: "Analyze: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
installPython: true
steps:
- uses: ./../action/init
with:
pr-checks/checks/bundle-from-toolcache.yml (new file, 31 lines)
@@ -0,0 +1,31 @@
name: "Bundle: From toolcache"
description: "The CodeQL bundle should be cached within the toolcache"
versions:
- toolcache
steps:
- name: Install @actions/tool-cache
run: npm install @actions/tool-cache
- name: Check toolcache contains CodeQL
continue-on-error: true
uses: actions/github-script@v8
with:
script: |
const toolcache = require('@actions/tool-cache');
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
if (allCodeqlVersions.length === 0) {
throw new Error(`CodeQL could not be found in the toolcache`);
}
- id: setup-codeql
uses: ./../action/setup-codeql
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check CodeQL is installed within the toolcache
uses: actions/github-script@v8
with:
script: |
const toolcache = require('@actions/tool-cache');
const allCodeqlVersions = toolcache.findAllVersions('CodeQL');
console.log(`Found CodeQL versions: ${allCodeqlVersions}`);
if (allCodeqlVersions.length === 0) {
throw new Error('CodeQL not found in toolcache');
}
@@ -2,6 +2,7 @@ name: "Local CodeQL bundle"
description: "Tests using a CodeQL bundle from a local file rather than a URL"
versions: ["linked"]
installGo: true
installPython: true
steps:
- name: Fetch latest CodeQL bundle
run: |
@@ -4,6 +4,7 @@ operatingSystems: ["macos", "ubuntu"]
env:
CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
installGo: true
installPython: true
steps:
- name: Use Xcode 16
if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
@@ -3,6 +3,7 @@ description: "Checks that specifying packages using a combination of a config fi
versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs
installGo: true
installNode: true
installPython: true
steps:
- uses: ./../action/init
with:
@@ -36,6 +36,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
upload-database: false
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v4
@@ -52,6 +53,14 @@ steps:
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: "${{ runner.temp }}/results/javascript.quality.sarif"
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v4
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
path: "${{ runner.temp }}/post-processed"
retention-days: 7
if-no-files-found: error
- name: Check quality query does not appear in security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
@@ -6,6 +6,7 @@ versions:
- linked
- nightly-latest
installGo: true
installPython: true
steps:
- uses: ./../action/init
with:
@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
versions: ["default"]
steps:
- name: Set up Ruby
uses: ruby/setup-ruby@0481980f17b760ef6bca5e8c55809102a0af1e5a # v1.263.0
uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
with:
ruby-version: 2.6
- name: Install Code Scanning integration
@@ -6,6 +6,7 @@ versions:
- linked
- nightly-latest
installGo: true
installPython: true
steps:
- uses: ./../action/init
id: init
@@ -1,26 +0,0 @@
name: "Upload-sarif: code quality endpoint"
description: "Checks that uploading SARIFs to the code quality endpoint works"
versions: ["default"]
installGo: true
steps:
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: csharp,java,javascript,python
analysis-kinds: code-quality
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
- uses: ./../action/upload-sarif
id: upload-sarif
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
- name: "Check output from `upload-sarif` step"
if: '!(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)'
run: exit 1
@@ -2,6 +2,7 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
installPython: true
steps:
- uses: ./../action/init
with:
82
pr-checks/checks/upload-sarif.yml
Normal file
82
pr-checks/checks/upload-sarif.yml
Normal file
@@ -0,0 +1,82 @@
|
||||
name: "Test different uses of `upload-sarif`"
|
||||
description: "Checks that uploading SARIFs to the code quality endpoint works"
|
||||
versions: ["default"]
|
||||
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
|
||||
installGo: true
|
||||
installPython: true
|
||||
steps:
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||
languages: csharp,java,javascript,python
|
||||
analysis-kinds: ${{ matrix.analysis-kinds }}
|
||||
- name: Build code
|
||||
run: ./build.sh
|
||||
# Generate some SARIF we can upload with the upload-sarif step
|
||||
- uses: ./../action/analyze
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
upload: never
|
||||
output: ${{ runner.temp }}/results
|
||||
|
||||
- name: |
|
||||
Upload all SARIF files for `analysis-kinds: ${{ matrix.analysis-kinds }}`
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-sarif
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
|
||||
- name: "Fail for missing output from `upload-sarif` step for `code-scanning`"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
|
||||
- name: "Fail for missing output from `upload-sarif` step for `code-quality`"
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)"
|
||||
run: exit 1
|
||||
|
||||
- name: Upload single SARIF file for Code Scanning
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-scanning
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
|
||||
- name: "Fail for missing output from `upload-single-sarif-code-scanning` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
|
||||
- name: Upload single SARIF file for Code Quality
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-sarif-code-quality
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
|
||||
- name: "Fail for missing output from `upload-single-sarif-code-quality` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)"
|
||||
run: exit 1
|
||||
|
||||
- name: Change SARIF file extension
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
|
||||
- name: Upload single non-`.sarif` file
|
||||
uses: ./../action/upload-sarif
|
||||
id: upload-single-non-sarif
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning')"
|
||||
with:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
|
||||
category: |
|
||||
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
|
||||
- name: "Fail for missing output from `upload-single-non-sarif` step"
|
||||
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)"
|
||||
run: exit 1
|
||||
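These checks parse the `sarif-ids` output with `fromJSON` and index it by analysis kind. Below is a minimal TypeScript sketch of how such an output could be assembled; the helper and the result shape are illustrative assumptions, not the Action's actual implementation.

```typescript
import * as core from "@actions/core";

// Illustrative result shape: one upload per analysis kind.
interface UploadOutcome {
  kind: "code-scanning" | "code-quality";
  sarifId: string;
}

// Collect the IDs into one JSON object so that workflow expressions such as
// `fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality` can index it.
function setSarifIdsOutput(outcomes: UploadOutcome[]): void {
  const sarifIds: Record<string, string> = {};
  for (const outcome of outcomes) {
    sarifIds[outcome.kind] = outcome.sarifId;
  }
  core.setOutput("sarif-ids", JSON.stringify(sarifIds));
}

// Fabricated IDs, purely for illustration.
setSarifIdsOutput([
  { kind: "code-scanning", sarifId: "example-code-scanning-id" },
  { kind: "code-quality", sarifId: "example-code-quality-id" },
]);
```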
@@ -2,6 +2,7 @@ name: "Use a custom `checkout_path`"
|
||||
description: "Checks that a custom `checkout_path` will find the proper commit_oid"
|
||||
versions: ["linked"]
|
||||
installGo: true
|
||||
installPython: true
|
||||
steps:
|
||||
# This ensures we don't accidentally use the original checkout for any part of the test.
|
||||
- name: Delete original checkout
|
||||
@@ -37,28 +38,29 @@ steps:
|
||||
|
||||
- name: Verify SARIF after upload
|
||||
run: |
|
||||
PAYLOAD_FILE="$RUNNER_TEMP/payload-code-scanning.json"
|
||||
EXPECTED_COMMIT_OID="474bbf07f9247ffe1856c6a0f94aeeb10e7afee6"
|
||||
EXPECTED_REF="v1.1.0"
|
||||
EXPECTED_CHECKOUT_URI_SUFFIX="/x/y/z/some-path/tests/multi-language-repo"
|
||||
|
||||
ACTUAL_COMMIT_OID="$(cat "$RUNNER_TEMP/payload.json" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$RUNNER_TEMP/payload.json" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$RUNNER_TEMP/payload.json" | jq -r .checkout_uri)"
|
||||
ACTUAL_COMMIT_OID="$(cat "$PAYLOAD_FILE" | jq -r .commit_oid)"
|
||||
ACTUAL_REF="$(cat "$PAYLOAD_FILE" | jq -r .ref)"
|
||||
ACTUAL_CHECKOUT_URI="$(cat "$PAYLOAD_FILE" | jq -r .checkout_uri)"
|
||||
|
||||
if [[ "$EXPECTED_COMMIT_OID" != "$ACTUAL_COMMIT_OID" ]]; then
|
||||
echo "::error Invalid commit oid. Expected: $EXPECTED_COMMIT_OID Actual: $ACTUAL_COMMIT_OID"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$EXPECTED_REF" != "$ACTUAL_REF" ]]; then
|
||||
echo "::error Invalid ref. Expected: '$EXPECTED_REF' Actual: '$ACTUAL_REF'"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$ACTUAL_CHECKOUT_URI" != *$EXPECTED_CHECKOUT_URI_SUFFIX ]]; then
|
||||
echo "::error Invalid checkout URI suffix. Expected suffix: $EXPECTED_CHECKOUT_URI_SUFFIX Actual uri: $ACTUAL_CHECKOUT_URI"
|
||||
echo "$RUNNER_TEMP/payload.json"
|
||||
echo "$PAYLOAD_FILE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -117,7 +117,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
|
||||
steps.extend([
|
||||
{
|
||||
'name': 'Install Node.js',
|
||||
'uses': 'actions/setup-node@v5',
|
||||
'uses': 'actions/setup-node@v6',
|
||||
'with': {
|
||||
'node-version': '20.x',
|
||||
'cache': 'npm',
|
||||
@@ -184,6 +184,26 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
|
||||
}
|
||||
})
|
||||
|
||||
installPython = is_truthy(checkSpecification.get('installPython', ''))
|
||||
|
||||
if installPython:
|
||||
basePythonVersionExpr = '3.13'
|
||||
workflowInputs['python-version'] = {
|
||||
'type': 'string',
|
||||
'description': 'The version of Python to install',
|
||||
'required': False,
|
||||
'default': basePythonVersionExpr,
|
||||
}
|
||||
|
||||
steps.append({
|
||||
'name': 'Install Python',
|
||||
'if': 'matrix.version != \'nightly-latest\'',
|
||||
'uses': 'actions/setup-python@v6',
|
||||
'with': {
|
||||
'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
|
||||
}
|
||||
})
|
||||
|
||||
# If container initialisation steps are present in the check specification,
|
||||
# make sure to execute them first.
|
||||
if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
|
||||
|
||||
setup-codeql/action.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
name: 'CodeQL: Setup'
description: 'Installs the CodeQL CLI'
author: 'GitHub'
inputs:
  tools:
    description: >-
      By default, the Action will use the recommended version of the CodeQL
      Bundle to analyze your project. You can override this choice using this
      input. One of:

      - A local path to a CodeQL Bundle tarball, or
      - The URL of a CodeQL Bundle tarball GitHub release asset, or
      - A special value `linked` which uses the version of the CodeQL tools
        that the Action has been bundled with.
      - A special value `nightly` which uses the latest nightly version of the
        CodeQL tools. Note that this is unstable and not recommended for
        production use.

      If not specified, the Action will check in several places until it finds
      the CodeQL tools.
    required: false
  token:
    description: GitHub token to use for authenticating with this instance of GitHub.
    default: ${{ github.token }}
    required: false
  matrix:
    default: ${{ toJson(matrix) }}
    required: false
  external-repository-token:
    description: A token for fetching additional files from private repositories in the same GitHub instance that is running this action.
    required: false
outputs:
  codeql-path:
    description: The path of the CodeQL binary that was installed.
  codeql-version:
    description: The version of the CodeQL binary that was installed.
runs:
  using: node24
  main: '../lib/setup-codeql-action.js'
||||
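The new action metadata declares `codeql-path` and `codeql-version` outputs and points `main` at `../lib/setup-codeql-action.js`. A rough sketch of an entrypoint matching that contract is below, assuming `@actions/core` for inputs and outputs; `installCodeQL` and its return shape are placeholders, not the Action's real setup logic.

```typescript
import * as core from "@actions/core";

// Assumed return shape for the installation helper; not the Action's real types.
interface SetupResult {
  codeqlPath: string;   // path to the installed CodeQL binary
  toolsVersion: string; // version of the installed CodeQL CLI
}

// Placeholder for the real download/toolcache logic.
async function installCodeQL(tools: string | undefined): Promise<SetupResult> {
  return {
    codeqlPath: "/opt/hostedtoolcache/CodeQL/2.23.3/x64/codeql/codeql",
    toolsVersion: "2.23.3",
  };
}

async function run(): Promise<void> {
  try {
    const tools = core.getInput("tools") || undefined;
    const result = await installCodeQL(tools);
    // Expose the same outputs that setup-codeql/action.yml declares.
    core.setOutput("codeql-path", result.codeqlPath);
    core.setOutput("codeql-version", result.toolsVersion);
  } catch (e) {
    core.setFailed(e instanceof Error ? e.message : String(e));
  }
}

void run();
```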
@@ -247,9 +247,14 @@ export function isSelfHostedRunner() {
|
||||
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
|
||||
}
|
||||
|
||||
/** Determines whether the workflow trigger is `dynamic`. */
|
||||
export function isDynamicWorkflow(): boolean {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
|
||||
/** Determines whether we are running in default setup. */
|
||||
export function isDefaultSetup(): boolean {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
return isDynamicWorkflow();
|
||||
}
|
||||
|
||||
export function prettyPrintInvocation(cmd: string, args: string[]): string {
|
||||
|
||||
@@ -1,12 +1,19 @@
|
||||
import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import {
|
||||
AnalysisKind,
|
||||
getAnalysisKinds,
|
||||
parseAnalysisKinds,
|
||||
supportedAnalysisKinds,
|
||||
} from "./analyses";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { setupTests } from "./testing-utils";
|
||||
import { ConfigurationError } from "./util";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
test("All known analysis kinds can be parsed successfully", async (t) => {
|
||||
for (const analysisKind of supportedAnalysisKinds) {
|
||||
t.deepEqual(await parseAnalysisKinds(analysisKind), [analysisKind]);
|
||||
@@ -34,3 +41,29 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
|
||||
instanceOf: ConfigurationError,
|
||||
});
|
||||
});
|
||||
|
||||
test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
requiredInputStub
|
||||
.withArgs("analysis-kinds")
|
||||
.returns("code-scanning,code-quality");
|
||||
const result = await getAnalysisKinds(getRunnerLogger(true), true);
|
||||
t.assert(result.includes(AnalysisKind.CodeScanning));
|
||||
t.assert(result.includes(AnalysisKind.CodeQuality));
|
||||
});
|
||||
|
||||
test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
|
||||
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
|
||||
optionalInputStub.withArgs("quality-queries").returns("code-quality");
|
||||
const result = await getAnalysisKinds(getRunnerLogger(true), true);
|
||||
t.assert(result.includes(AnalysisKind.CodeScanning));
|
||||
t.assert(result.includes(AnalysisKind.CodeQuality));
|
||||
});
|
||||
|
||||
test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
|
||||
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
|
||||
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
|
||||
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
|
||||
});
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import { fixCodeQualityCategory } from "./actions-util";
|
||||
import {
|
||||
fixCodeQualityCategory,
|
||||
getOptionalInput,
|
||||
getRequiredInput,
|
||||
} from "./actions-util";
|
||||
import { Logger } from "./logging";
|
||||
import { ConfigurationError } from "./util";
|
||||
|
||||
@@ -41,6 +45,55 @@ export async function parseAnalysisKinds(
|
||||
);
|
||||
}
|
||||
|
||||
// Used to avoid re-parsing the input after we have done it once.
|
||||
let cachedAnalysisKinds: AnalysisKind[] | undefined;
|
||||
|
||||
/**
|
||||
* Initialises the analysis kinds for the analysis based on the `analysis-kinds` input.
|
||||
* This function will also use the deprecated `quality-queries` input as an indicator to enable `code-quality`.
|
||||
* If the `analysis-kinds` input cannot be parsed, a `ConfigurationError` is thrown.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param skipCache For testing, whether to ignore the cached values (default: false).
|
||||
*
|
||||
* @returns The array of enabled analysis kinds.
|
||||
* @throws A `ConfigurationError` if the `analysis-kinds` input cannot be parsed.
|
||||
*/
|
||||
export async function getAnalysisKinds(
|
||||
logger: Logger,
|
||||
skipCache: boolean = false,
|
||||
): Promise<AnalysisKind[]> {
|
||||
if (!skipCache && cachedAnalysisKinds !== undefined) {
|
||||
return cachedAnalysisKinds;
|
||||
}
|
||||
|
||||
cachedAnalysisKinds = await parseAnalysisKinds(
|
||||
getRequiredInput("analysis-kinds"),
|
||||
);
|
||||
|
||||
// Warn that `quality-queries` is deprecated if there is an argument for it.
|
||||
const qualityQueriesInput = getOptionalInput("quality-queries");
|
||||
|
||||
if (qualityQueriesInput !== undefined) {
|
||||
logger.warning(
|
||||
"The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
|
||||
"Use the `analysis-kinds` input to configure different analysis kinds instead.",
|
||||
);
|
||||
}
|
||||
|
||||
// For backwards compatibility, add Code Quality to the enabled analysis kinds
|
||||
// if an input to `quality-queries` was specified. We should remove this once
|
||||
// `quality-queries` is no longer used.
|
||||
if (
|
||||
!cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
|
||||
qualityQueriesInput !== undefined
|
||||
) {
|
||||
cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
|
||||
}
|
||||
|
||||
return cachedAnalysisKinds;
|
||||
}
|
||||
|
||||
/** The queries to use for Code Quality analyses. */
|
||||
export const codeQualityQueries: string[] = ["code-quality"];
|
||||
|
||||
|
||||
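Since `getAnalysisKinds` caches its result in a module-level variable, the `analysis-kinds` input (and the deprecated `quality-queries` back-compat path) is parsed at most once per run; tests pass `skipCache: true` to bypass the cache. A short usage sketch, assuming the exports shown in the diff above:

```typescript
import { AnalysisKind, getAnalysisKinds } from "./analyses";
import { getActionsLogger } from "./logging";

async function reportEnabledKinds(): Promise<void> {
  const logger = getActionsLogger();

  // First call parses the inputs; later calls return the cached array.
  const kinds = await getAnalysisKinds(logger);

  if (kinds.includes(AnalysisKind.CodeScanning)) {
    logger.info("Code Scanning analysis is enabled.");
  }
  if (kinds.includes(AnalysisKind.CodeQuality)) {
    logger.info("Code Quality analysis is enabled.");
  }
}

void reportEnabledKinds();
```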
@@ -24,6 +24,9 @@ setupTests(test);
|
||||
// but the first test would fail.
|
||||
|
||||
test("analyze action with RAM & threads from environment variables", async (t) => {
|
||||
// This test frequently times out on Windows with the default timeout, so we bump
|
||||
// it a bit to 20s.
|
||||
t.timeout(1000 * 20);
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
|
||||
@@ -24,6 +24,7 @@ setupTests(test);
|
||||
// but the first test would fail.
|
||||
|
||||
test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
t.timeout(1000 * 20);
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
|
||||
@@ -52,6 +52,7 @@ import {
|
||||
} from "./trap-caching";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
import { postProcessAndUploadSarif } from "./upload-sarif";
|
||||
import * as util from "./util";
|
||||
|
||||
interface AnalysisStatusReport
|
||||
@@ -211,7 +212,9 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {
|
||||
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let uploadResult: UploadResult | undefined = undefined;
|
||||
let uploadResults:
|
||||
| Partial<Record<analyses.AnalysisKind, UploadResult>>
|
||||
| undefined = undefined;
|
||||
let runStats: QueriesStatusReport | undefined = undefined;
|
||||
let config: Config | undefined = undefined;
|
||||
let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
|
||||
@@ -341,31 +344,67 @@ async function run() {
|
||||
}
|
||||
core.setOutput("db-locations", dbLocations);
|
||||
core.setOutput("sarif-output", path.resolve(outputDir));
|
||||
const uploadInput = actionsUtil.getOptionalInput("upload");
|
||||
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
|
||||
if (isCodeScanningEnabled(config)) {
|
||||
uploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
const uploadKind = actionsUtil.getUploadValue(
|
||||
actionsUtil.getOptionalInput("upload"),
|
||||
);
|
||||
if (runStats) {
|
||||
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
|
||||
const category = actionsUtil.getOptionalInput("category");
|
||||
|
||||
if (Math.random() > -1) {
|
||||
uploadResults = await postProcessAndUploadSarif(
|
||||
logger,
|
||||
analyses.CodeScanning,
|
||||
features,
|
||||
uploadKind,
|
||||
checkoutPath,
|
||||
outputDir,
|
||||
category,
|
||||
actionsUtil.getOptionalInput("post-processed-sarif-path"),
|
||||
);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
} else if (uploadKind === "always") {
|
||||
uploadResults = {};
|
||||
|
||||
if (isCodeScanningEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeScanning,
|
||||
);
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeQuality] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeQuality,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
uploadResults = {};
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = analyses.CodeQuality;
|
||||
const qualityUploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
analysis,
|
||||
// Set the SARIF id outputs only if we have results for them, to avoid
|
||||
// having keys with empty values in the action output.
|
||||
if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
|
||||
core.setOutput(
|
||||
"sarif-id",
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
|
||||
);
|
||||
}
|
||||
if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
|
||||
core.setOutput(
|
||||
"quality-sarif-id",
|
||||
uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
|
||||
);
|
||||
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
} else {
|
||||
logger.info("Not uploading results");
|
||||
@@ -408,12 +447,12 @@ async function run() {
|
||||
if (util.isInTestMode()) {
|
||||
logger.debug("In test mode. Waiting for processing is disabled.");
|
||||
} else if (
|
||||
uploadResult !== undefined &&
|
||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
|
||||
actionsUtil.getRequiredInput("wait-for-processing") === "true"
|
||||
) {
|
||||
await uploadLib.waitForProcessing(
|
||||
getRepositoryNwo(),
|
||||
uploadResult.sarifID,
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
|
||||
getActionsLogger(),
|
||||
);
|
||||
}
|
||||
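This refactor replaces the single `uploadResult` with a `Partial<Record<AnalysisKind, UploadResult>>`, so `sarif-id` and `quality-sarif-id` are only set for analysis kinds that actually produced an upload. Note that on this branch the new `postProcessAndUploadSarif` path is gated behind an always-true `Math.random() > -1` placeholder, presumably standing in for the new `AnalyzeUseNewUpload` feature flag. A self-contained sketch of the output-selection pattern, with simplified stand-in types:

```typescript
// Simplified stand-ins for the real types.
enum AnalysisKind {
  CodeScanning = "code-scanning",
  CodeQuality = "code-quality",
}

interface UploadResult {
  sarifID: string;
}

type UploadResults = Partial<Record<AnalysisKind, UploadResult>>;

// Only emit an output for kinds that have a result, mirroring the
// `sarif-id` / `quality-sarif-id` handling in the analyze action.
function collectOutputs(results: UploadResults): Record<string, string> {
  const outputs: Record<string, string> = {};
  const scanning = results[AnalysisKind.CodeScanning];
  if (scanning !== undefined) {
    outputs["sarif-id"] = scanning.sarifID;
  }
  const quality = results[AnalysisKind.CodeQuality];
  if (quality !== undefined) {
    outputs["quality-sarif-id"] = quality.sarifID;
  }
  return outputs;
}

// Only `sarif-id` appears, since no Code Quality upload happened.
console.log(
  collectOutputs({ [AnalysisKind.CodeScanning]: { sarifID: "example-id" } }),
);
```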
@@ -450,13 +489,16 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
|
||||
if (runStats && uploadResult) {
|
||||
if (
|
||||
runStats !== undefined &&
|
||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
|
||||
) {
|
||||
await sendStatusReport(
|
||||
startedAt,
|
||||
config,
|
||||
{
|
||||
...runStats,
|
||||
...uploadResult.statusReport,
|
||||
...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
|
||||
},
|
||||
undefined,
|
||||
trapCacheUploadTime,
|
||||
@@ -466,7 +508,7 @@ async function run() {
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else if (runStats) {
|
||||
} else if (runStats !== undefined) {
|
||||
await sendStatusReport(
|
||||
startedAt,
|
||||
config,
|
||||
|
||||
@@ -7,12 +7,12 @@ import { getActionVersion, getRequiredInput } from "./actions-util";
|
||||
import { Logger } from "./logging";
|
||||
import { getRepositoryNwo, RepositoryNwo } from "./repository";
|
||||
import {
|
||||
asHTTPError,
|
||||
ConfigurationError,
|
||||
getRequiredEnvParam,
|
||||
GITHUB_DOTCOM_URL,
|
||||
GitHubVariant,
|
||||
GitHubVersion,
|
||||
isHTTPError,
|
||||
parseGitHubUrl,
|
||||
parseMatrixInput,
|
||||
} from "./util";
|
||||
@@ -280,22 +280,29 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
|
||||
}
|
||||
|
||||
export function wrapApiConfigurationError(e: unknown) {
|
||||
if (isHTTPError(e)) {
|
||||
const httpError = asHTTPError(e);
|
||||
if (httpError !== undefined) {
|
||||
if (
|
||||
e.message.includes("API rate limit exceeded for installation") ||
|
||||
e.message.includes("commit not found") ||
|
||||
e.message.includes("Resource not accessible by integration") ||
|
||||
/ref .* not found in this repository/.test(e.message)
|
||||
[
|
||||
/API rate limit exceeded/,
|
||||
/commit not found/,
|
||||
/Resource not accessible by integration/,
|
||||
/ref .* not found in this repository/,
|
||||
].some((pattern) => pattern.test(httpError.message))
|
||||
) {
|
||||
return new ConfigurationError(e.message);
|
||||
} else if (
|
||||
e.message.includes("Bad credentials") ||
|
||||
e.message.includes("Not Found")
|
||||
return new ConfigurationError(httpError.message);
|
||||
}
|
||||
if (
|
||||
httpError.message.includes("Bad credentials") ||
|
||||
httpError.message.includes("Not Found")
|
||||
) {
|
||||
return new ConfigurationError(
|
||||
"Please check that your token is valid and has the required permissions: contents: read, security-events: write",
|
||||
);
|
||||
}
|
||||
if (httpError.status === 429) {
|
||||
return new ConfigurationError("API rate limit exceeded");
|
||||
}
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
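A typical call site wraps an API request and rethrows the classified error so that only the recognised failure modes surface as `ConfigurationError`s. A hedged sketch; `fetchCommit` is an illustrative helper, not part of the Action's API surface:

```typescript
import * as api from "./api-client";

// Illustrative helper; not part of the Action's API surface.
async function fetchCommit(owner: string, repo: string, ref: string) {
  try {
    return await api.getApiClient().rest.repos.getCommit({ owner, repo, ref });
  } catch (rawError) {
    // Recognised API failures (rate limits, missing refs or commits, bad
    // credentials) become ConfigurationError; anything else is rethrown as-is.
    throw api.wrapApiConfigurationError(rawError);
  }
}
```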
@@ -310,6 +310,20 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
|
||||
t.true(wrappedError instanceof ConfigurationError);
|
||||
});
|
||||
|
||||
test("wrapCliConfigurationError - unknown query file", (t) => {
|
||||
const commandError = new CommandInvocationError(
|
||||
"codeql",
|
||||
["database", "init"],
|
||||
2,
|
||||
"my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
|
||||
);
|
||||
const cliError = new CliError(commandError);
|
||||
|
||||
const wrappedError = wrapCliConfigurationError(cliError);
|
||||
|
||||
t.true(wrappedError instanceof ConfigurationError);
|
||||
});
|
||||
|
||||
test("wrapCliConfigurationError - pack missing auth", (t) => {
|
||||
const commandError = new CommandInvocationError(
|
||||
"codeql",
|
||||
|
||||
@@ -264,6 +264,9 @@ export const cliErrorsConfig: Record<
|
||||
new RegExp(
|
||||
"Query pack .* cannot be found\\. Check the spelling of the pack\\.",
|
||||
),
|
||||
new RegExp(
|
||||
"is not a .ql file, .qls file, a directory, or a query pack specification.",
|
||||
),
|
||||
],
|
||||
},
|
||||
[CliConfigErrorCategory.PackMissingAuth]: {
|
||||
|
||||
@@ -36,7 +36,6 @@ import {
|
||||
createTestConfig,
|
||||
} from "./testing-utils";
|
||||
import { ToolsDownloadStatusReport } from "./tools-download";
|
||||
import { ToolsFeature } from "./tools-features";
|
||||
import * as util from "./util";
|
||||
import { initializeEnvironment } from "./util";
|
||||
|
||||
@@ -74,6 +73,7 @@ async function installIntoToolcache({
|
||||
cliVersion !== undefined
|
||||
? { cliVersion, tagName }
|
||||
: SAMPLE_DEFAULT_CLI_VERSION,
|
||||
createFeatures([]),
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -122,6 +122,8 @@ async function stubCodeql(): Promise<codeql.CodeQL> {
|
||||
}
|
||||
|
||||
test("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -140,6 +142,7 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -154,6 +157,8 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
|
||||
});
|
||||
|
||||
test("caches semantically versioned bundles using their semantic version number", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const url = mockBundleDownloadApi({
|
||||
@@ -166,6 +171,7 @@ test("caches semantically versioned bundles using their semantic version number"
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -181,6 +187,8 @@ test("caches semantically versioned bundles using their semantic version number"
|
||||
});
|
||||
|
||||
test("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -199,6 +207,7 @@ test("downloads an explicitly requested bundle even if a different version is ca
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -227,6 +236,8 @@ for (const {
|
||||
expectedToolcacheVersion,
|
||||
} of EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES) {
|
||||
test(`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -243,6 +254,7 @@ for (const {
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -266,6 +278,8 @@ for (const toolcacheVersion of [
|
||||
`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
|
||||
`${toolcacheVersion} is installed`,
|
||||
async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -281,6 +295,7 @@ for (const toolcacheVersion of [
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -295,6 +310,8 @@ for (const toolcacheVersion of [
|
||||
}
|
||||
|
||||
test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -313,6 +330,7 @@ test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
},
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -328,6 +346,8 @@ test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
|
||||
});
|
||||
|
||||
test(`downloads bundle if only an unpinned version is cached on GHES`, async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -349,6 +369,7 @@ test(`downloads bundle if only an unpinned version is cached on GHES`, async (t)
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
},
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -364,6 +385,8 @@ test(`downloads bundle if only an unpinned version is cached on GHES`, async (t)
|
||||
});
|
||||
|
||||
test('downloads bundle if "latest" tools specified but not cached', async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -382,6 +405,7 @@ test('downloads bundle if "latest" tools specified but not cached', async (t) =>
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -397,6 +421,8 @@ test('downloads bundle if "latest" tools specified but not cached', async (t) =>
|
||||
});
|
||||
|
||||
test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -417,6 +443,7 @@ test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t)
|
||||
tmpDir,
|
||||
util.GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
false,
|
||||
);
|
||||
@@ -842,84 +869,6 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
|
||||
});
|
||||
});
|
||||
|
||||
const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0", {
|
||||
[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
|
||||
}),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
flagPassed: false,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0"),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
flagPassed: true,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0"),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.GHES,
|
||||
version: "3.10.0",
|
||||
},
|
||||
flagPassed: true,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
];
|
||||
|
||||
for (const {
|
||||
codeqlVersion,
|
||||
flagPassed,
|
||||
githubVersion,
|
||||
negativeFlagPassed,
|
||||
} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
|
||||
test(`database interpret-results passes ${
|
||||
flagPassed
|
||||
? "--new-analysis-summary"
|
||||
: negativeFlagPassed
|
||||
? "--no-new-analysis-summary"
|
||||
: "nothing"
|
||||
} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
|
||||
util.GitHubVariant[githubVersion.type]
|
||||
} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
undefined,
|
||||
"",
|
||||
Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
|
||||
createFeatures([]),
|
||||
);
|
||||
const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
|
||||
t.is(
|
||||
actualArgs.includes("--new-analysis-summary"),
|
||||
flagPassed,
|
||||
`--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
|
||||
);
|
||||
t.is(
|
||||
actualArgs.includes("--no-new-analysis-summary"),
|
||||
negativeFlagPassed,
|
||||
`--no-new-analysis-summary should${
|
||||
negativeFlagPassed ? "" : "n't"
|
||||
} be passed`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
test("runTool summarizes several fatal errors", async (t) => {
|
||||
const heapError =
|
||||
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
|
||||
|
||||
@@ -267,7 +267,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
|
||||
* The version flags below can be used to conditionally enable certain features
|
||||
* on versions newer than this.
|
||||
*/
|
||||
const CODEQL_MINIMUM_VERSION = "2.16.6";
|
||||
const CODEQL_MINIMUM_VERSION = "2.17.6";
|
||||
|
||||
/**
|
||||
* This version will shortly become the oldest version of CodeQL that the Action will run with.
|
||||
@@ -308,6 +308,7 @@ const CODEQL_VERSION_CACHE_CLEANUP = "2.17.1";
|
||||
* @param tempDir
|
||||
* @param variant
|
||||
* @param defaultCliVersion
|
||||
* @param features Information about the features that are enabled.
|
||||
* @param logger
|
||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||
* version requirement. Must be set to true outside tests.
|
||||
@@ -319,6 +320,7 @@ export async function setupCodeQL(
|
||||
tempDir: string,
|
||||
variant: util.GitHubVariant,
|
||||
defaultCliVersion: CodeQLDefaultVersionInfo,
|
||||
features: FeatureEnablement,
|
||||
logger: Logger,
|
||||
checkVersion: boolean,
|
||||
): Promise<{
|
||||
@@ -341,6 +343,7 @@ export async function setupCodeQL(
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -367,7 +370,8 @@ export async function setupCodeQL(
|
||||
toolsVersion,
|
||||
zstdAvailability,
|
||||
};
|
||||
} catch (e) {
|
||||
} catch (rawError) {
|
||||
const e = api.wrapApiConfigurationError(rawError);
|
||||
const ErrorClass =
|
||||
e instanceof util.ConfigurationError ||
|
||||
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
|
||||
@@ -856,14 +860,6 @@ export async function getCodeQLForCmd(
|
||||
} else {
|
||||
codeqlArgs.push("--no-sarif-include-diagnostics");
|
||||
}
|
||||
if (
|
||||
!isSupportedToolsFeature(
|
||||
await this.getVersion(),
|
||||
ToolsFeature.AnalysisSummaryV2IsDefault,
|
||||
)
|
||||
) {
|
||||
codeqlArgs.push("--new-analysis-summary");
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
if (querySuitePaths) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
|
||||
@@ -49,10 +49,9 @@ function createTestInitConfigInputs(
|
||||
return Object.assign(
|
||||
{},
|
||||
{
|
||||
analysisKindsInput: "code-scanning",
|
||||
analysisKinds: [AnalysisKind.CodeScanning],
|
||||
languagesInput: undefined,
|
||||
queriesInput: undefined,
|
||||
qualityQueriesInput: undefined,
|
||||
packsInput: undefined,
|
||||
configFile: undefined,
|
||||
dbLocation: undefined,
|
||||
@@ -149,6 +148,7 @@ test("load empty config", async (t) => {
|
||||
});
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
@@ -188,8 +188,9 @@ test("load code quality config", async (t) => {
|
||||
});
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
analysisKindsInput: "code-quality",
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
tempDir,
|
||||
@@ -272,8 +273,9 @@ test("initActionState doesn't throw if there are queries configured in the repos
|
||||
|
||||
await t.notThrowsAsync(async () => {
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
analysisKindsInput: "code-quality",
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
tempDir,
|
||||
@@ -310,6 +312,7 @@ test("loading a saved config produces the same config", async (t) => {
|
||||
t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);
|
||||
|
||||
const config1 = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: "javascript,python",
|
||||
tempDir,
|
||||
@@ -361,6 +364,7 @@ test("loading config with version mismatch throws", async (t) => {
|
||||
.returns("does-not-exist");
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: "javascript,python",
|
||||
tempDir,
|
||||
@@ -389,6 +393,7 @@ test("load input outside of workspace", async (t) => {
|
||||
return await withTmpDir(async (tempDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: "../input",
|
||||
tempDir,
|
||||
@@ -416,6 +421,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile,
|
||||
tempDir,
|
||||
@@ -444,6 +450,7 @@ test("load non-existent input", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile,
|
||||
@@ -527,6 +534,7 @@ test("load non-empty input", async (t) => {
|
||||
const configFilePath = createConfigFile(inputFileContents, tempDir);
|
||||
|
||||
const actualConfig = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
buildModeInput: "none",
|
||||
@@ -583,6 +591,7 @@ test("Using config input and file together, config input should be used.", async
|
||||
const languagesInput = "javascript";
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile: configFilePath,
|
||||
@@ -633,6 +642,7 @@ test("API client used when reading remote config", async (t) => {
|
||||
const languagesInput = "javascript";
|
||||
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile,
|
||||
@@ -653,6 +663,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
@@ -681,6 +692,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
@@ -710,6 +722,7 @@ test("No detected languages", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
tempDir,
|
||||
codeql,
|
||||
@@ -732,6 +745,7 @@ test("Unknown languages", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
tempDir,
|
||||
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
CodeQuality,
|
||||
codeQualityQueries,
|
||||
CodeScanning,
|
||||
parseAnalysisKinds,
|
||||
} from "./analyses";
|
||||
import * as api from "./api-client";
|
||||
import { CachingKind, getCachingKind } from "./caching-utils";
|
||||
@@ -20,6 +19,7 @@ import {
|
||||
calculateAugmentation,
|
||||
ExcludeQueryFilter,
|
||||
generateCodeScanningConfig,
|
||||
parseUserConfig,
|
||||
UserConfig,
|
||||
} from "./config/db-config";
|
||||
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
|
||||
@@ -373,10 +373,8 @@ export async function getRawLanguages(
|
||||
|
||||
/** Inputs required to initialize a configuration. */
|
||||
export interface InitConfigInputs {
|
||||
analysisKindsInput: string;
|
||||
languagesInput: string | undefined;
|
||||
queriesInput: string | undefined;
|
||||
qualityQueriesInput: string | undefined;
|
||||
packsInput: string | undefined;
|
||||
configFile: string | undefined;
|
||||
dbLocation: string | undefined;
|
||||
@@ -396,6 +394,7 @@ export interface InitConfigInputs {
|
||||
apiDetails: api.GitHubApiCombinedDetails;
|
||||
features: FeatureEnablement;
|
||||
repositoryProperties: RepositoryProperties;
|
||||
analysisKinds: AnalysisKind[];
|
||||
logger: Logger;
|
||||
}
|
||||
|
||||
@@ -405,10 +404,8 @@ export interface InitConfigInputs {
|
||||
*/
|
||||
export async function initActionState(
|
||||
{
|
||||
analysisKindsInput,
|
||||
languagesInput,
|
||||
queriesInput,
|
||||
qualityQueriesInput,
|
||||
packsInput,
|
||||
buildModeInput,
|
||||
dbLocation,
|
||||
@@ -424,22 +421,11 @@ export async function initActionState(
|
||||
githubVersion,
|
||||
features,
|
||||
repositoryProperties,
|
||||
analysisKinds,
|
||||
logger,
|
||||
}: InitConfigInputs,
|
||||
userConfig: UserConfig,
|
||||
): Promise<Config> {
|
||||
const analysisKinds = await parseAnalysisKinds(analysisKindsInput);
|
||||
|
||||
// For backwards compatibility, add Code Quality to the enabled analysis kinds
|
||||
// if an input to `quality-queries` was specified. We should remove this once
|
||||
// `quality-queries` is no longer used.
|
||||
if (
|
||||
!analysisKinds.includes(AnalysisKind.CodeQuality) &&
|
||||
qualityQueriesInput !== undefined
|
||||
) {
|
||||
analysisKinds.push(AnalysisKind.CodeQuality);
|
||||
}
|
||||
|
||||
const languages = await getLanguages(
|
||||
codeql,
|
||||
languagesInput,
|
||||
@@ -540,10 +526,12 @@ async function downloadCacheWithTime(
|
||||
}
|
||||
|
||||
async function loadUserConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
workspacePath: string,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
tempDir: string,
|
||||
validateConfig: boolean,
|
||||
): Promise<UserConfig> {
|
||||
if (isLocal(configFile)) {
|
||||
if (configFile !== userConfigFromActionPath(tempDir)) {
|
||||
@@ -556,9 +544,14 @@ async function loadUserConfig(
|
||||
);
|
||||
}
|
||||
}
|
||||
return getLocalConfig(configFile);
|
||||
return getLocalConfig(logger, configFile, validateConfig);
|
||||
} else {
|
||||
return await getRemoteConfig(configFile, apiDetails);
|
||||
return await getRemoteConfig(
|
||||
logger,
|
||||
configFile,
|
||||
apiDetails,
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -723,7 +716,14 @@ export async function getOverlayDatabaseMode(
|
||||
buildMode !== BuildMode.None &&
|
||||
(
|
||||
await Promise.all(
|
||||
languages.map(async (l) => await codeql.isTracedLanguage(l)),
|
||||
languages.map(
|
||||
async (l) =>
|
||||
l !== KnownLanguage.go && // Workaround to allow overlay analysis for Go with any build
|
||||
// mode, since it does not yet support BMN. The Go autobuilder and/or extractor will
|
||||
// ensure that overlay-base databases are only created for supported Go build setups,
|
||||
// and that we'll fall back to full databases in other cases.
|
||||
(await codeql.isTracedLanguage(l)),
|
||||
),
|
||||
)
|
||||
).some(Boolean)
|
||||
) {
|
||||
@@ -787,7 +787,10 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
|
||||
export async function initConfig(
|
||||
features: FeatureEnablement,
|
||||
inputs: InitConfigInputs,
|
||||
): Promise<Config> {
|
||||
const { logger, tempDir } = inputs;
|
||||
|
||||
// if configInput is set, it takes precedence over configFile
|
||||
@@ -807,11 +810,14 @@ export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
|
||||
logger.debug("No configuration file was provided");
|
||||
} else {
|
||||
logger.debug(`Using configuration file: ${inputs.configFile}`);
|
||||
const validateConfig = await features.getValue(Feature.ValidateDbConfig);
|
||||
userConfig = await loadUserConfig(
|
||||
logger,
|
||||
inputs.configFile,
|
||||
inputs.workspacePath,
|
||||
inputs.apiDetails,
|
||||
tempDir,
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -905,7 +911,11 @@ function isLocal(configPath: string): boolean {
|
||||
return configPath.indexOf("@") === -1;
|
||||
}
|
||||
|
||||
function getLocalConfig(configFile: string): UserConfig {
|
||||
function getLocalConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
validateConfig: boolean,
|
||||
): UserConfig {
|
||||
// Error if the file does not exist
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new ConfigurationError(
|
||||
@@ -913,12 +923,19 @@ function getLocalConfig(configFile: string): UserConfig {
|
||||
);
|
||||
}
|
||||
|
||||
return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
|
||||
return parseUserConfig(
|
||||
logger,
|
||||
configFile,
|
||||
fs.readFileSync(configFile, "utf-8"),
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
async function getRemoteConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
validateConfig: boolean,
|
||||
): Promise<UserConfig> {
|
||||
// retrieve the various parts of the config location, and ensure they're present
|
||||
const format = new RegExp(
|
||||
@@ -926,7 +943,7 @@ async function getRemoteConfig(
|
||||
);
|
||||
const pieces = format.exec(configFile);
|
||||
// 5 = 4 groups + the whole expression
|
||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||
if (pieces?.groups === undefined || pieces.length < 5) {
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
|
||||
);
|
||||
@@ -954,9 +971,12 @@ async function getRemoteConfig(
|
||||
);
|
||||
}
|
||||
|
||||
return yaml.load(
|
||||
return parseUserConfig(
|
||||
logger,
|
||||
configFile,
|
||||
Buffer.from(fileContents, "base64").toString("binary"),
|
||||
) as UserConfig;
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -2,7 +2,13 @@ import test, { ExecutionContext } from "ava";
|
||||
|
||||
import { RepositoryProperties } from "../feature-flags/properties";
|
||||
import { KnownLanguage, Language } from "../languages";
|
||||
import { prettyPrintPack } from "../util";
|
||||
import { getRunnerLogger } from "../logging";
|
||||
import {
|
||||
checkExpectedLogMessages,
|
||||
getRecordingLogger,
|
||||
LoggedMessage,
|
||||
} from "../testing-utils";
|
||||
import { ConfigurationError, prettyPrintPack } from "../util";
|
||||
|
||||
import * as dbConfig from "./db-config";
|
||||
|
||||
@@ -391,3 +397,111 @@ test(
|
||||
{},
|
||||
/"a-pack-without-a-scope" is not a valid pack/,
|
||||
);
|
||||
|
||||
test("parseUserConfig - successfully parses valid YAML", (t) => {
|
||||
const result = dbConfig.parseUserConfig(
|
||||
getRunnerLogger(true),
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries:
|
||||
- uses: foo
|
||||
some-unknown-option: true
|
||||
`,
|
||||
true,
|
||||
);
|
||||
t.truthy(result);
|
||||
if (t.truthy(result["paths-ignore"])) {
|
||||
t.is(result["paths-ignore"].length, 1);
|
||||
t.is(result["paths-ignore"][0], "some/path");
|
||||
}
|
||||
if (t.truthy(result["queries"])) {
|
||||
t.is(result["queries"].length, 1);
|
||||
t.deepEqual(result["queries"][0], { uses: "foo" });
|
||||
}
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
|
||||
t.throws(
|
||||
() =>
|
||||
dbConfig.parseUserConfig(
|
||||
getRunnerLogger(true),
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries:
|
||||
- foo
|
||||
`,
|
||||
true,
|
||||
),
|
||||
{
|
||||
instanceOf: ConfigurationError,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.notThrows(() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
query-filters:
|
||||
- something
|
||||
- include: foo
|
||||
- exclude: bar
|
||||
`,
|
||||
true,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries: true
|
||||
`,
|
||||
true,
|
||||
),
|
||||
{
|
||||
instanceOf: ConfigurationError,
|
||||
message:
|
||||
'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
|
||||
},
|
||||
);
|
||||
|
||||
const expectedMessages = ["instance.queries is not of a type(s) array"];
|
||||
checkExpectedLogMessages(t, loggedMessages, expectedMessages);
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.notThrows(() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries: true
|
||||
`,
|
||||
false,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import * as path from "path";
|
||||
|
||||
import * as yaml from "js-yaml";
|
||||
import * as jsonschema from "jsonschema";
|
||||
import * as semver from "semver";
|
||||
|
||||
import * as errorMessages from "../error-messages";
|
||||
@@ -378,10 +380,7 @@ function combineQueries(
|
||||
const result: QuerySpec[] = [];
|
||||
|
||||
// Query settings obtained from the repository properties have the highest precedence.
|
||||
if (
|
||||
augmentationProperties.repoPropertyQueries &&
|
||||
augmentationProperties.repoPropertyQueries.input
|
||||
) {
|
||||
if (augmentationProperties.repoPropertyQueries?.input) {
|
||||
logger.info(
|
||||
`Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
|
||||
`${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,
|
||||
@@ -474,3 +473,53 @@ export function generateCodeScanningConfig(
|
||||
|
||||
return augmentedConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to parse `contents` into a `UserConfig` value.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param pathInput The path to the file where `contents` was obtained from, for use in error messages.
|
||||
* @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
|
||||
* @param validateConfig Whether to validate the configuration file against the schema.
|
||||
* @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
|
||||
* @throws A `ConfigurationError` if parsing failed.
|
||||
*/
|
||||
export function parseUserConfig(
|
||||
logger: Logger,
|
||||
pathInput: string,
|
||||
contents: string,
|
||||
validateConfig: boolean,
|
||||
): UserConfig {
|
||||
try {
|
||||
const schema =
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
require("../../src/db-config-schema.json") as jsonschema.Schema;
|
||||
|
||||
const doc = yaml.load(contents);
|
||||
|
||||
if (validateConfig) {
|
||||
const result = new jsonschema.Validator().validate(doc, schema);
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
for (const error of result.errors) {
|
||||
logger.error(error.stack);
|
||||
}
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getInvalidConfigFileMessage(
|
||||
pathInput,
|
||||
result.errors.map((e) => e.stack),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return doc as UserConfig;
|
||||
} catch (error) {
|
||||
if (error instanceof yaml.YAMLException) {
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
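Outside the config loaders, `parseUserConfig` can also be called directly on a YAML string. A minimal sketch, assuming the import paths relative to `src/` shown elsewhere in this diff:

```typescript
import { parseUserConfig } from "./config/db-config";
import { getRunnerLogger } from "./logging";
import { ConfigurationError } from "./util";

const contents = `
name: Example configuration
queries:
  - uses: security-extended
paths-ignore:
  - node_modules
`;

try {
  // The final `true` enables schema validation, mirroring the ValidateDbConfig feature flag.
  const config = parseUserConfig(
    getRunnerLogger(true),
    "codeql-config.yml",
    contents,
    true,
  );
  console.log(`Parsed config with ${config.queries?.length ?? 0} extra queries.`);
} catch (e) {
  if (e instanceof ConfigurationError) {
    console.error(`Invalid configuration: ${e.message}`);
  } else {
    throw e;
  }
}
```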
@@ -5,6 +5,7 @@ import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { AnalysisKind } from "./analyses";
|
||||
import { GitHubApiDetails } from "./api-client";
|
||||
import * as apiClient from "./api-client";
|
||||
import { createStubCodeQL } from "./codeql";
|
||||
@@ -108,6 +109,39 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
await mockHttpRequests(201);
|
||||
|
||||
const loggedMessages = [];
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getCodeQL(),
|
||||
{
|
||||
...getTestConfig(tmpDir),
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
},
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages),
|
||||
);
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v: LoggedMessage) =>
|
||||
v.type === "debug" &&
|
||||
v.message ===
|
||||
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
|
||||
) !== undefined,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if running against GHES", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as fs from "fs";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { AnalysisKind } from "./analyses";
|
||||
import { getApiClient, GitHubApiDetails } from "./api-client";
|
||||
import { type CodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
@@ -22,6 +23,13 @@ export async function uploadDatabases(
|
||||
return;
|
||||
}
|
||||
|
||||
if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
|
||||
logger.debug(
|
||||
`Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (util.isInTestMode()) {
|
||||
logger.debug("In test mode. Skipping database upload.");
|
||||
return;
|
||||
|
||||
src/db-config-schema.json (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "CodeQL Database Configuration",
|
||||
"description": "Format of the config file supplied by the user for CodeQL analysis",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Name of the configuration"
|
||||
},
|
||||
"disable-default-queries": {
|
||||
"type": "boolean",
|
||||
"description": "Whether to disable default queries"
|
||||
},
|
||||
"queries": {
|
||||
"type": "array",
|
||||
"description": "List of additional queries to run",
|
||||
"items": {
|
||||
"$ref": "#/definitions/QuerySpec"
|
||||
}
|
||||
},
|
||||
"paths-ignore": {
|
||||
"type": "array",
|
||||
"description": "Paths to ignore during analysis",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"paths": {
|
||||
"type": "array",
|
||||
"description": "Paths to include in analysis",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"packs": {
|
||||
"description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"additionalProperties": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"query-filters": {
|
||||
"type": "array",
|
||||
"description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
|
||||
"items": {
|
||||
"$ref": "#/definitions/QueryFilter"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalProperties": true,
|
||||
"definitions": {
|
||||
"QuerySpec": {
|
||||
"type": "object",
|
||||
"description": "Detailed query specification object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"description": "Optional name for the query"
|
||||
},
|
||||
"uses": {
|
||||
"type": "string",
|
||||
"description": "The query or query suite to use"
|
||||
}
|
||||
},
|
||||
"required": ["uses"],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"QueryFilter": {
|
||||
"description": "Query filter that can either include or exclude queries",
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/definitions/ExcludeQueryFilter"
|
||||
},
|
||||
{
|
||||
"$ref": "#/definitions/IncludeQueryFilter"
|
||||
},
|
||||
{}
|
||||
]
|
||||
},
|
||||
"ExcludeQueryFilter": {
|
||||
"type": "object",
|
||||
"description": "Filter to exclude queries",
|
||||
"properties": {
|
||||
"exclude": {
|
||||
"type": "object",
|
||||
"description": "Queries to exclude",
|
||||
"additionalProperties": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["exclude"],
|
||||
"additionalProperties": false
|
||||
},
|
||||
"IncludeQueryFilter": {
|
||||
"type": "object",
|
||||
"description": "Filter to include queries",
|
||||
"properties": {
|
||||
"include": {
|
||||
"type": "object",
|
||||
"description": "Queries to include",
|
||||
"additionalProperties": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": ["include"],
|
||||
"additionalProperties": false
|
||||
}
|
||||
}
|
||||
}
|
||||
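The schema above defines the shape of the user-supplied CodeQL configuration file that the new `ValidateDbConfig` feature is meant to check. As an editor's illustration only (the Action's actual validation wiring is not part of this diff, and the use of Ajv below is an assumption), a parsed config object could be checked against src/db-config-schema.json roughly like this:

// Illustrative sketch, not part of the change. Assumes Ajv is available and
// that `resolveJsonModule` is enabled so the schema can be imported directly.
import Ajv2020 from "ajv/dist/2020"; // draft 2020-12 support
import * as dbConfigSchema from "./db-config-schema.json";

const ajv = new Ajv2020({ allErrors: true });
const validateDbConfig = ajv.compile(dbConfigSchema);

// Example config that satisfies the schema above.
const parsedUserConfig = {
  "disable-default-queries": false,
  queries: [{ uses: "security-extended" }],
  "paths-ignore": ["node_modules", "dist"],
  packs: ["codeql/javascript-queries"],
};

if (!validateDbConfig(parsedUserConfig)) {
  // Messages of this shape are the kind of input getInvalidConfigFileMessage expects.
  const messages = (validateDbConfig.errors ?? []).map(
    (e) => `${e.instancePath || "/"} ${e.message}`,
  );
  console.log(messages);
}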
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-v2.23.2",
|
||||
"cliVersion": "2.23.2",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.1",
|
||||
"priorCliVersion": "2.23.1"
|
||||
"bundleVersion": "codeql-bundle-v2.23.3",
|
||||
"cliVersion": "2.23.3",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.2",
|
||||
"priorCliVersion": "2.23.2"
|
||||
}
|
||||
|
||||
@@ -47,6 +47,9 @@ export enum EnvVar {
|
||||
/** Whether the CodeQL Action has already warned the user about low disk space. */
|
||||
HAS_WARNED_ABOUT_DISK_SPACE = "CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE",
|
||||
|
||||
/** Whether the `setup-codeql` action has been run. */
|
||||
SETUP_CODEQL_ACTION_HAS_RUN = "CODEQL_ACTION_SETUP_CODEQL_HAS_RUN",
|
||||
|
||||
/** Whether the init action has been run. */
|
||||
INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_HAS_RUN",
|
||||
|
||||
@@ -128,4 +131,10 @@ export enum EnvVar {
   * whether the upload is disabled. This is intended for testing and debugging purposes.
   */
  SARIF_DUMP_DIR = "CODEQL_ACTION_SARIF_DUMP_DIR",

  /**
   * Whether to skip uploading SARIF results to GitHub. Intended for testing purposes.
   * This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
   */
  SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",
}
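The `shouldSkipSarifUpload` helper referenced later in this diff lives in src/util.ts and its body is not shown here. A minimal sketch of how it could consume the new variable, assuming (as documented above) that test mode implies skipping the upload:

// Sketch only; the real implementation in src/util.ts may differ.
import { EnvVar } from "./environment";
import { isInTestMode } from "./util";

export function shouldSkipSarifUpload(): boolean {
  return process.env[EnvVar.SKIP_SARIF_UPLOAD] === "true" || isInTestMode();
}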
@@ -14,6 +14,22 @@ export function getConfigFileDoesNotExistErrorMessage(
  return `The configuration file "${configFile}" does not exist`;
}

export function getConfigFileParseErrorMessage(
  configFile: string,
  message: string,
): string {
  return `Cannot parse "${configFile}": ${message}`;
}

export function getInvalidConfigFileMessage(
  configFile: string,
  messages: string[],
): string {
  const andMore =
    messages.length > 10 ? `, and ${messages.length - 10} more.` : ".";
  return `The configuration file "${configFile}" is invalid: ${messages.slice(0, 10).join(", ")}${andMore}`;
}
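A quick illustration of the truncation behaviour of getInvalidConfigFileMessage, assuming the function above is in scope (the file name and messages are made up for the example):

// With 12 messages, only the first 10 are listed and ", and 2 more." is appended.
const exampleErrors = Array.from({ length: 12 }, (_, i) => `error ${i + 1}`);
getInvalidConfigFileMessage("codeql-config.yml", exampleErrors);
// => 'The configuration file "codeql-config.yml" is invalid: error 1, ..., error 10, and 2 more.'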
export function getConfigFileRepoFormatInvalidMessage(
|
||||
configFile: string,
|
||||
): string {
|
||||
|
||||
@@ -43,6 +43,8 @@ export interface FeatureEnablement {
|
||||
* Legacy features should end with `_enabled`.
|
||||
*/
|
||||
export enum Feature {
|
||||
AllowToolcacheInput = "allow_toolcache_input",
|
||||
AnalyzeUseNewUpload = "analyze_use_new_upload",
|
||||
CleanupTrapCaches = "cleanup_trap_caches",
|
||||
CppDependencyInstallation = "cpp_dependency_installation_enabled",
|
||||
DiffInformedQueries = "diff_informed_queries",
|
||||
@@ -73,9 +75,10 @@ export enum Feature {
|
||||
OverlayAnalysisRust = "overlay_analysis_rust",
|
||||
OverlayAnalysisSwift = "overlay_analysis_swift",
|
||||
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
|
||||
UseRepositoryProperties = "use_repository_properties",
|
||||
QaTelemetryEnabled = "qa_telemetry_enabled",
|
||||
ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
|
||||
UseRepositoryProperties = "use_repository_properties",
|
||||
ValidateDbConfig = "validate_db_config",
|
||||
}
|
||||
|
||||
export const featureConfig: Record<
|
||||
@@ -109,6 +112,16 @@ export const featureConfig: Record<
|
||||
toolsFeature?: ToolsFeature;
|
||||
}
|
||||
> = {
|
||||
[Feature.AllowToolcacheInput]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.AnalyzeUseNewUpload]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.CleanupTrapCaches]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
|
||||
@@ -281,6 +294,11 @@ export const featureConfig: Record<
    envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
    minimumVersion: "2.23.0",
  },
  [Feature.ValidateDbConfig]: {
    defaultValue: false,
    envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
    minimumVersion: undefined,
  },
};
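The call site that consults the new `ValidateDbConfig` flag is not included in this changeset. As a hedged sketch of how such a gate would typically look (the helper name below is hypothetical):

// Hypothetical helper, not part of this diff: returns whether schema
// validation of the user config should run. Defaults to off; can be forced
// on via the CODEQL_ACTION_VALIDATE_DB_CONFIG environment variable.
import { Feature, FeatureEnablement } from "./feature-flags";

async function shouldValidateDbConfig(
  features: FeatureEnablement,
): Promise<boolean> {
  return await features.getValue(Feature.ValidateDbConfig);
}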
|
||||
/**
|
||||
@@ -635,7 +653,7 @@ class GitHubFeatureFlags {
|
||||
}
|
||||
|
||||
this.logger.debug(
|
||||
"Loaded the following default values for the feature flags from the Code Scanning API:",
|
||||
"Loaded the following default values for the feature flags from the CodeQL Action API:",
|
||||
);
|
||||
for (const [feature, value] of Object.entries(remoteFlags).sort(
|
||||
([nameA], [nameB]) => nameA.localeCompare(nameB),
|
||||
@@ -645,12 +663,13 @@ class GitHubFeatureFlags {
|
||||
this.hasAccessedRemoteFeatureFlags = true;
|
||||
return remoteFlags;
|
||||
} catch (e) {
|
||||
if (util.isHTTPError(e) && e.status === 403) {
|
||||
const httpError = util.asHTTPError(e);
|
||||
if (httpError?.status === 403) {
|
||||
this.logger.warning(
|
||||
"This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
|
||||
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
|
||||
"As a result, it will not be opted into any experimental features. " +
|
||||
"This could be because the Action is running on a pull request from a fork. If not, " +
|
||||
`please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`,
|
||||
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
|
||||
);
|
||||
this.hasAccessedRemoteFeatureFlags = false;
|
||||
return {};
|
||||
|
||||
@@ -2,6 +2,7 @@ import test, { ExecutionContext } from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { AnalysisKind } from "./analyses";
|
||||
import * as codeql from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { Feature } from "./feature-flags";
|
||||
@@ -28,12 +29,13 @@ test("post: init action with debug mode off", async (t) => {
|
||||
const gitHubVersion: util.GitHubVersion = {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
};
|
||||
sinon.stub(configUtils, "getConfig").resolves({
|
||||
debugMode: false,
|
||||
gitHubVersion,
|
||||
languages: [],
|
||||
packs: [],
|
||||
} as unknown as configUtils.Config);
|
||||
sinon.stub(configUtils, "getConfig").resolves(
|
||||
createTestConfig({
|
||||
debugMode: false,
|
||||
gitHubVersion,
|
||||
languages: [],
|
||||
}),
|
||||
);
|
||||
|
||||
const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
|
||||
const printDebugLogsSpy = sinon.spy();
|
||||
@@ -295,6 +297,17 @@ test("uploading failed SARIF run fails when workflow does not reference github/c
|
||||
t.truthy(result.upload_failed_run_stack_trace);
|
||||
});
|
||||
|
||||
test("not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", async (t) => {
|
||||
const result = await testFailedSarifUpload(t, createTestWorkflow([]), {
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
expectUpload: false,
|
||||
});
|
||||
t.is(
|
||||
result.upload_failed_run_skipped_because,
|
||||
"Code Scanning is not enabled.",
|
||||
);
|
||||
});
|
||||
|
||||
function createTestWorkflow(
|
||||
steps: workflow.WorkflowJobStep[],
|
||||
): workflow.Workflow {
|
||||
@@ -327,20 +340,22 @@ async function testFailedSarifUpload(
|
||||
expectUpload = true,
|
||||
exportDiagnosticsEnabled = false,
|
||||
matrix = {},
|
||||
analysisKinds = [AnalysisKind.CodeScanning],
|
||||
}: {
|
||||
category?: string;
|
||||
databaseExists?: boolean;
|
||||
expectUpload?: boolean;
|
||||
exportDiagnosticsEnabled?: boolean;
|
||||
matrix?: { [key: string]: string };
|
||||
analysisKinds?: AnalysisKind[];
|
||||
} = {},
|
||||
): Promise<initActionPostHelper.UploadFailedSarifResult> {
|
||||
const config = {
|
||||
const config = createTestConfig({
|
||||
analysisKinds,
|
||||
codeQLCmd: "codeql",
|
||||
debugMode: true,
|
||||
languages: [],
|
||||
packs: [],
|
||||
} as unknown as configUtils.Config;
|
||||
});
|
||||
if (databaseExists) {
|
||||
config.dbLocation = "path/to/database";
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import * as actionsUtil from "./actions-util";
|
||||
import { CodeScanning } from "./analyses";
|
||||
import { getApiClient } from "./api-client";
|
||||
import { CodeQL, getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { Config, isCodeScanningEnabled } from "./config-utils";
|
||||
import * as dependencyCaching from "./dependency-caching";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
@@ -19,8 +19,8 @@ import {
|
||||
delay,
|
||||
getErrorMessage,
|
||||
getRequiredEnvParam,
|
||||
isInTestMode,
|
||||
parseMatrixInput,
|
||||
shouldSkipSarifUpload,
|
||||
wrapError,
|
||||
} from "./util";
|
||||
import {
|
||||
@@ -81,7 +81,7 @@ async function maybeUploadFailedSarif(
|
||||
!["always", "failure-only"].includes(
|
||||
actionsUtil.getUploadValue(shouldUpload),
|
||||
) ||
|
||||
isInTestMode()
|
||||
shouldSkipSarifUpload()
|
||||
) {
|
||||
return { upload_failed_run_skipped_because: "SARIF upload is disabled" };
|
||||
}
|
||||
@@ -139,6 +139,15 @@ export async function tryUploadSarifIfRunFailed(
    EnvVar.JOB_STATUS,
    process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus,
  );

  // If the only enabled analysis kind is `code-quality`, then we shouldn't
  // upload the failed SARIF to Code Scanning.
  if (!isCodeScanningEnabled(config)) {
    return {
      upload_failed_run_skipped_because: "Code Scanning is not enabled.",
    };
  }

  try {
    return await maybeUploadFailedSarif(
      config,
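isCodeScanningEnabled is imported from ./config-utils but its body is not part of this diff. Judging from the tests above, which build configs with an analysisKinds array, a plausible minimal shape would be the following (an assumption, not the confirmed implementation):

// Plausible sketch: Code Scanning counts as enabled when it appears among
// the configured analysis kinds.
import { AnalysisKind } from "./analyses";
import { Config } from "./config-utils";

export function isCodeScanningEnabled(config: Config): boolean {
  return config.analysisKinds.includes(AnalysisKind.CodeScanning);
}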
@@ -15,6 +15,7 @@ import {
|
||||
getTemporaryDirectory,
|
||||
persistInputs,
|
||||
} from "./actions-util";
|
||||
import { AnalysisKind, getAnalysisKinds } from "./analyses";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import {
|
||||
getDependencyCachingEnabled,
|
||||
@@ -56,6 +57,7 @@ import { ToolsSource } from "./setup-codeql";
|
||||
import {
|
||||
ActionName,
|
||||
InitStatusReport,
|
||||
InitToolsDownloadFields,
|
||||
InitWithConfigStatusReport,
|
||||
createInitWithConfigStatusReport,
|
||||
createStatusReportBase,
|
||||
@@ -86,14 +88,29 @@ import {
|
||||
} from "./util";
|
||||
import { validateWorkflow } from "./workflow";
|
||||
|
||||
/** Fields of the init status report populated when the tools source is `download`. */
|
||||
interface InitToolsDownloadFields {
|
||||
/** Time taken to download the bundle, in milliseconds. */
|
||||
tools_download_duration_ms?: number;
|
||||
/**
|
||||
* Whether the relevant tools dotcom feature flags have been misconfigured.
|
||||
* Only populated if we attempt to determine the default version based on the dotcom feature flags. */
|
||||
tools_feature_flags_valid?: boolean;
|
||||
/**
|
||||
* Sends a status report indicating that the `init` Action is starting.
|
||||
*
|
||||
* @param startedAt
|
||||
* @param config
|
||||
* @param logger
|
||||
*/
|
||||
async function sendStartingStatusReport(
|
||||
startedAt: Date,
|
||||
config: Partial<configUtils.Config> | undefined,
|
||||
logger: Logger,
|
||||
) {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.Init,
|
||||
"starting",
|
||||
startedAt,
|
||||
config,
|
||||
await checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
if (statusReportBase !== undefined) {
|
||||
await sendStatusReport(statusReportBase);
|
||||
}
|
||||
}
|
||||
|
||||
async function sendCompletedStatusReport(
|
||||
@@ -210,6 +227,7 @@ async function run() {
|
||||
? await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo)
|
||||
: {};
|
||||
|
||||
// Create a unique identifier for this run.
|
||||
const jobRunUuid = uuidV4();
|
||||
logger.info(`Job run UUID is ${jobRunUuid}.`);
|
||||
core.exportVariable(EnvVar.JOB_RUN_UUID, jobRunUuid);
|
||||
@@ -227,17 +245,30 @@ async function run() {
|
||||
);
|
||||
|
||||
try {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.Init,
|
||||
"starting",
|
||||
startedAt,
|
||||
config,
|
||||
await checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
if (statusReportBase !== undefined) {
|
||||
await sendStatusReport(statusReportBase);
|
||||
// Parsing the `analysis-kinds` input may throw a `ConfigurationError`, which we don't want before
|
||||
// we have called `sendStartingStatusReport` below. However, we want the analysis kinds for that status
|
||||
// report. To work around this, we ignore exceptions that are thrown here and then call `getAnalysisKinds`
|
||||
// a second time later. The second call will then throw the exception again. If `getAnalysisKinds` is
|
||||
// successful, the results are cached so that we don't duplicate the work in normal runs.
|
||||
let analysisKinds: AnalysisKind[] | undefined;
|
||||
try {
|
||||
analysisKinds = await getAnalysisKinds(logger);
|
||||
} catch (err) {
|
||||
logger.debug(
|
||||
`Failed to parse analysis kinds for 'starting' status report: ${getErrorMessage(err)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Send a status report indicating that an analysis is starting.
|
||||
await sendStartingStatusReport(startedAt, { analysisKinds }, logger);
|
||||
|
||||
// Throw a `ConfigurationError` if the `setup-codeql` action has been run.
|
||||
if (process.env[EnvVar.SETUP_CODEQL_ACTION_HAS_RUN] === "true") {
|
||||
throw new ConfigurationError(
|
||||
`The 'init' action should not be run in the same workflow as 'setup-codeql'.`,
|
||||
);
|
||||
}
|
||||
|
||||
const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
|
||||
gitHubVersion.type,
|
||||
);
|
||||
@@ -248,6 +279,7 @@ async function run() {
|
||||
getTemporaryDirectory(),
|
||||
gitHubVersion.type,
|
||||
codeQLDefaultVersionInfo,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
codeql = initCodeQLResult.codeql;
|
||||
@@ -292,21 +324,11 @@ async function run() {
|
||||
}
|
||||
}
|
||||
|
||||
// Warn that `quality-queries` is deprecated if there is an argument for it.
|
||||
const qualityQueriesInput = getOptionalInput("quality-queries");
|
||||
|
||||
if (qualityQueriesInput !== undefined) {
|
||||
logger.warning(
|
||||
"The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
|
||||
"Use the `analysis-kinds` input to configure different analysis kinds instead.",
|
||||
);
|
||||
}
|
||||
|
||||
config = await initConfig({
|
||||
analysisKindsInput: getRequiredInput("analysis-kinds"),
|
||||
analysisKinds = await getAnalysisKinds(logger);
|
||||
config = await initConfig(features, {
|
||||
analysisKinds,
|
||||
languagesInput: getOptionalInput("languages"),
|
||||
queriesInput: getOptionalInput("queries"),
|
||||
qualityQueriesInput,
|
||||
packsInput: getOptionalInput("packs"),
|
||||
buildModeInput: getOptionalInput("build-mode"),
|
||||
configFile,
|
||||
|
||||
@@ -9,7 +9,7 @@ import { getOptionalInput, isSelfHostedRunner } from "./actions-util";
|
||||
import { GitHubApiDetails } from "./api-client";
|
||||
import { CodeQL, setupCodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { CodeQLDefaultVersionInfo } from "./feature-flags";
|
||||
import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
import { Logger, withGroupAsync } from "./logging";
|
||||
import { ToolsSource } from "./setup-codeql";
|
||||
@@ -23,6 +23,7 @@ export async function initCodeQL(
|
||||
tempDir: string,
|
||||
variant: util.GitHubVariant,
|
||||
defaultCliVersion: CodeQLDefaultVersionInfo,
|
||||
features: FeatureEnablement,
|
||||
logger: Logger,
|
||||
): Promise<{
|
||||
codeql: CodeQL;
|
||||
@@ -44,6 +45,7 @@ export async function initCodeQL(
|
||||
tempDir,
|
||||
variant,
|
||||
defaultCliVersion,
|
||||
features,
|
||||
logger,
|
||||
true,
|
||||
);
|
||||
@@ -59,10 +61,11 @@ export async function initCodeQL(
|
||||
}
|
||||
|
||||
export async function initConfig(
|
||||
features: FeatureEnablement,
|
||||
inputs: configUtils.InitConfigInputs,
|
||||
): Promise<configUtils.Config> {
|
||||
return await withGroupAsync("Load language configuration", async () => {
|
||||
return await configUtils.initConfig(inputs);
|
||||
return await configUtils.initConfig(features, inputs);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -34,15 +34,10 @@ export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
 * Actions Cache client library. Instead we place a limit on the uncompressed
 * size of the overlay-base database.
 *
 * Assuming 2.5:1 compression ratio, the 15 GB limit on uncompressed data would
 * translate to a limit of around 6 GB after compression. This is a high limit
 * compared to the default 10GB Actions Cache capacity, but enforcement of Actions
 * Cache quotas is not immediate.
 *
 * TODO: revisit this limit before removing the restriction for overlay analysis
 * to the `github` and `dsp-testing` orgs.
 * Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
 * translate to a limit of around 3 GB after compression.
 */
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15000;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
  OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;
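A worked check of the sizing comment above (editor's illustration, not part of the change):

// At the assumed 2.5:1 compression ratio, the 7,500 MB cap on uncompressed
// overlay-base databases corresponds to roughly 3,000 MB (about 3 GB) compressed.
const maxUncompressedMb = 7500;
const assumedCompressionRatio = 2.5;
const approxCompressedMb = maxUncompressedMb / assumedCompressionRatio; // 3000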
196
src/setup-codeql-action.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import * as core from "@actions/core";
|
||||
import { v4 as uuidV4 } from "uuid";
|
||||
|
||||
import {
|
||||
getActionVersion,
|
||||
getOptionalInput,
|
||||
getRequiredInput,
|
||||
getTemporaryDirectory,
|
||||
} from "./actions-util";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import { CodeQL } from "./codeql";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Features } from "./feature-flags";
|
||||
import { initCodeQL } from "./init";
|
||||
import { getActionsLogger, Logger } from "./logging";
|
||||
import { getRepositoryNwo } from "./repository";
|
||||
import { ToolsSource } from "./setup-codeql";
|
||||
import {
|
||||
ActionName,
|
||||
InitStatusReport,
|
||||
InitToolsDownloadFields,
|
||||
createStatusReportBase,
|
||||
getActionsStatus,
|
||||
sendStatusReport,
|
||||
} from "./status-report";
|
||||
import { ToolsDownloadStatusReport } from "./tools-download";
|
||||
import {
|
||||
checkDiskUsage,
|
||||
checkForTimeout,
|
||||
checkGitHubVersionInRange,
|
||||
getRequiredEnvParam,
|
||||
initializeEnvironment,
|
||||
ConfigurationError,
|
||||
wrapError,
|
||||
checkActionVersion,
|
||||
getErrorMessage,
|
||||
} from "./util";
|
||||
|
||||
/**
|
||||
* Helper function to send a full status report for this action.
|
||||
*/
|
||||
async function sendCompletedStatusReport(
|
||||
startedAt: Date,
|
||||
toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined,
|
||||
toolsFeatureFlagsValid: boolean | undefined,
|
||||
toolsSource: ToolsSource,
|
||||
toolsVersion: string,
|
||||
logger: Logger,
|
||||
error?: Error,
|
||||
): Promise<void> {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.SetupCodeQL,
|
||||
getActionsStatus(error),
|
||||
startedAt,
|
||||
undefined,
|
||||
await checkDiskUsage(logger),
|
||||
logger,
|
||||
error?.message,
|
||||
error?.stack,
|
||||
);
|
||||
|
||||
if (statusReportBase === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const initStatusReport: InitStatusReport = {
|
||||
...statusReportBase,
|
||||
tools_input: getOptionalInput("tools") || "",
|
||||
tools_resolved_version: toolsVersion,
|
||||
tools_source: toolsSource || ToolsSource.Unknown,
|
||||
workflow_languages: "",
|
||||
};
|
||||
|
||||
const initToolsDownloadFields: InitToolsDownloadFields = {};
|
||||
|
||||
if (toolsDownloadStatusReport?.downloadDurationMs !== undefined) {
|
||||
initToolsDownloadFields.tools_download_duration_ms =
|
||||
toolsDownloadStatusReport.downloadDurationMs;
|
||||
}
|
||||
if (toolsFeatureFlagsValid !== undefined) {
|
||||
initToolsDownloadFields.tools_feature_flags_valid = toolsFeatureFlagsValid;
|
||||
}
|
||||
|
||||
await sendStatusReport({ ...initStatusReport, ...initToolsDownloadFields });
|
||||
}
|
||||
|
||||
/** The main behaviour of this action. */
|
||||
async function run(): Promise<void> {
|
||||
const startedAt = new Date();
|
||||
const logger = getActionsLogger();
|
||||
initializeEnvironment(getActionVersion());
|
||||
|
||||
let codeql: CodeQL;
|
||||
let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined;
|
||||
let toolsFeatureFlagsValid: boolean | undefined;
|
||||
let toolsSource: ToolsSource;
|
||||
let toolsVersion: string;
|
||||
|
||||
const apiDetails = {
|
||||
auth: getRequiredInput("token"),
|
||||
externalRepoAuth: getOptionalInput("external-repository-token"),
|
||||
url: getRequiredEnvParam("GITHUB_SERVER_URL"),
|
||||
apiURL: getRequiredEnvParam("GITHUB_API_URL"),
|
||||
};
|
||||
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
checkGitHubVersionInRange(gitHubVersion, logger);
|
||||
checkActionVersion(getActionVersion(), gitHubVersion);
|
||||
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
|
||||
const features = new Features(
|
||||
gitHubVersion,
|
||||
repositoryNwo,
|
||||
getTemporaryDirectory(),
|
||||
logger,
|
||||
);
|
||||
|
||||
const jobRunUuid = uuidV4();
|
||||
logger.info(`Job run UUID is ${jobRunUuid}.`);
|
||||
core.exportVariable(EnvVar.JOB_RUN_UUID, jobRunUuid);
|
||||
|
||||
try {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.SetupCodeQL,
|
||||
"starting",
|
||||
startedAt,
|
||||
undefined,
|
||||
await checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
if (statusReportBase !== undefined) {
|
||||
await sendStatusReport(statusReportBase);
|
||||
}
|
||||
const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
|
||||
gitHubVersion.type,
|
||||
);
|
||||
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
|
||||
const initCodeQLResult = await initCodeQL(
|
||||
getOptionalInput("tools"),
|
||||
apiDetails,
|
||||
getTemporaryDirectory(),
|
||||
gitHubVersion.type,
|
||||
codeQLDefaultVersionInfo,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
codeql = initCodeQLResult.codeql;
|
||||
toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport;
|
||||
toolsVersion = initCodeQLResult.toolsVersion;
|
||||
toolsSource = initCodeQLResult.toolsSource;
|
||||
|
||||
core.setOutput("codeql-path", codeql.getPath());
|
||||
core.setOutput("codeql-version", (await codeql.getVersion()).version);
|
||||
|
||||
core.exportVariable(EnvVar.SETUP_CODEQL_ACTION_HAS_RUN, "true");
|
||||
} catch (unwrappedError) {
|
||||
const error = wrapError(unwrappedError);
|
||||
core.setFailed(error.message);
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.SetupCodeQL,
|
||||
error instanceof ConfigurationError ? "user-error" : "failure",
|
||||
startedAt,
|
||||
undefined,
|
||||
await checkDiskUsage(logger),
|
||||
logger,
|
||||
error.message,
|
||||
error.stack,
|
||||
);
|
||||
if (statusReportBase !== undefined) {
|
||||
await sendStatusReport(statusReportBase);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
await sendCompletedStatusReport(
|
||||
startedAt,
|
||||
toolsDownloadStatusReport,
|
||||
toolsFeatureFlagsValid,
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
/** Run the action and catch any unhandled errors. */
|
||||
async function runWrapper(): Promise<void> {
|
||||
try {
|
||||
await run();
|
||||
} catch (error) {
|
||||
core.setFailed(`setup-codeql action failed: ${getErrorMessage(error)}`);
|
||||
}
|
||||
await checkForTimeout();
|
||||
}
|
||||
|
||||
void runWrapper();
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as path from "path";
|
||||
|
||||
import test from "ava";
|
||||
import * as toolcache from "@actions/tool-cache";
|
||||
import test, { ExecutionContext } from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
@@ -12,6 +13,7 @@ import {
|
||||
LoggedMessage,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
createFeatures,
|
||||
getRecordingLogger,
|
||||
initializeFeatures,
|
||||
mockBundleDownloadApi,
|
||||
@@ -90,6 +92,8 @@ test("getCodeQLActionRepository", (t) => {
|
||||
});
|
||||
|
||||
test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const tagName = "codeql-bundle-v1.2.3";
|
||||
@@ -100,6 +104,7 @@ test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) =>
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
GitHubVariant.DOTCOM,
|
||||
false,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
|
||||
@@ -109,6 +114,8 @@ test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) =>
|
||||
});
|
||||
|
||||
test("getCodeQLSource correctly returns bundled CLI version when tools == linked", async (t) => {
|
||||
const features = createFeatures([]);
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const source = await setupCodeql.getCodeQLSource(
|
||||
@@ -117,6 +124,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == linked
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
GitHubVariant.DOTCOM,
|
||||
false,
|
||||
features,
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
|
||||
@@ -128,6 +136,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == linked
|
||||
test("getCodeQLSource correctly returns bundled CLI version when tools == latest", async (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
const features = createFeatures([]);
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
@@ -137,6 +146,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == latest
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
GitHubVariant.DOTCOM,
|
||||
false,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -161,6 +171,7 @@ test("getCodeQLSource correctly returns bundled CLI version when tools == latest
|
||||
test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use linked tools", async (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
const features = createFeatures([]);
|
||||
|
||||
  // Stub the downloadCodeQL function to prevent it from being called and
  // downloading artefacts during testing.
|
||||
@@ -185,6 +196,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use
|
||||
"tmp/codeql_action_test/",
|
||||
GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -207,6 +219,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use
|
||||
test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to download a non-default bundle", async (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
const features = createFeatures([]);
|
||||
|
||||
const bundleUrl =
|
||||
"https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.16.0/codeql-bundle-linux64.tar.gz";
|
||||
@@ -235,6 +248,7 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
|
||||
"tmp/codeql_action_test/",
|
||||
GitHubVariant.DOTCOM,
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -254,6 +268,160 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
|
||||
});
|
||||
});
|
||||
|
||||
test("getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", async (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
const features = createFeatures([Feature.AllowToolcacheInput]);
|
||||
|
||||
process.env["GITHUB_EVENT_NAME"] = "dynamic";
|
||||
|
||||
const latestToolcacheVersion = "3.2.1";
|
||||
const latestVersionPath = "/path/to/latest";
|
||||
const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"];
|
||||
const findAllVersionsStub = sinon
|
||||
.stub(toolcache, "findAllVersions")
|
||||
.returns(testVersions);
|
||||
const findStub = sinon.stub(toolcache, "find");
|
||||
findStub
|
||||
.withArgs("CodeQL", latestToolcacheVersion)
|
||||
.returns(latestVersionPath);
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const source = await setupCodeql.getCodeQLSource(
|
||||
"toolcache",
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
GitHubVariant.DOTCOM,
|
||||
false,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
// Check that the toolcache functions were called with the expected arguments
|
||||
t.assert(
|
||||
findAllVersionsStub.calledOnceWith("CodeQL"),
|
||||
`toolcache.findAllVersions("CodeQL") wasn't called`,
|
||||
);
|
||||
t.assert(
|
||||
findStub.calledOnceWith("CodeQL", latestToolcacheVersion),
|
||||
`toolcache.find("CodeQL", ${latestToolcacheVersion}) wasn't called`,
|
||||
);
|
||||
|
||||
// Check that `sourceType` and `toolsVersion` match expectations.
|
||||
t.is(source.sourceType, "toolcache");
|
||||
t.is(source.toolsVersion, latestToolcacheVersion);
|
||||
|
||||
// Check that key messages we would expect to find in the log are present.
|
||||
const expectedMessages: string[] = [
|
||||
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`,
|
||||
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`,
|
||||
`Using CodeQL CLI version ${latestToolcacheVersion} from toolcache at ${latestVersionPath}`,
|
||||
];
|
||||
for (const expectedMessage of expectedMessages) {
|
||||
t.assert(
|
||||
loggedMessages.some(
|
||||
(msg) =>
|
||||
typeof msg.message === "string" &&
|
||||
msg.message.includes(expectedMessage),
|
||||
),
|
||||
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const toolcacheInputFallbackMacro = test.macro({
|
||||
exec: async (
|
||||
t: ExecutionContext<unknown>,
|
||||
featureList: Feature[],
|
||||
environment: Record<string, string>,
|
||||
testVersions: string[],
|
||||
expectedMessages: string[],
|
||||
) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
const features = createFeatures(featureList);
|
||||
|
||||
for (const [k, v] of Object.entries(environment)) {
|
||||
process.env[k] = v;
|
||||
}
|
||||
|
||||
const findAllVersionsStub = sinon
|
||||
.stub(toolcache, "findAllVersions")
|
||||
.returns(testVersions);
|
||||
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
const source = await setupCodeql.getCodeQLSource(
|
||||
"toolcache",
|
||||
SAMPLE_DEFAULT_CLI_VERSION,
|
||||
SAMPLE_DOTCOM_API_DETAILS,
|
||||
GitHubVariant.DOTCOM,
|
||||
false,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
// Check that the toolcache functions were called with the expected arguments
|
||||
t.assert(
|
||||
findAllVersionsStub.calledWith("CodeQL"),
|
||||
`toolcache.findAllVersions("CodeQL") wasn't called`,
|
||||
);
|
||||
|
||||
// Check that `sourceType` and `toolsVersion` match expectations.
|
||||
t.is(source.sourceType, "download");
|
||||
t.is(source.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
|
||||
|
||||
// Check that key messages we would expect to find in the log are present.
|
||||
for (const expectedMessage of expectedMessages) {
|
||||
t.assert(
|
||||
loggedMessages.some(
|
||||
(msg) =>
|
||||
typeof msg.message === "string" &&
|
||||
msg.message.includes(expectedMessage),
|
||||
),
|
||||
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`,
|
||||
);
|
||||
}
|
||||
});
|
||||
},
|
||||
title: (providedTitle = "") =>
|
||||
`getCodeQLSource falls back to downloading the CLI if ${providedTitle}`,
|
||||
});
|
||||
|
||||
test(
|
||||
"the toolcache doesn't have a CodeQL CLI when tools == toolcache",
|
||||
toolcacheInputFallbackMacro,
|
||||
[Feature.AllowToolcacheInput],
|
||||
{ GITHUB_EVENT_NAME: "dynamic" },
|
||||
[],
|
||||
[
|
||||
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`,
|
||||
`Found no CodeQL CLI in the toolcache, ignoring 'tools: toolcache'...`,
|
||||
],
|
||||
);
|
||||
|
||||
test(
|
||||
"the workflow trigger is not `dynamic`",
|
||||
toolcacheInputFallbackMacro,
|
||||
[Feature.AllowToolcacheInput],
|
||||
{ GITHUB_EVENT_NAME: "pull_request" },
|
||||
[],
|
||||
[
|
||||
`Ignoring 'tools: toolcache' because the workflow was not triggered dynamically.`,
|
||||
],
|
||||
);
|
||||
|
||||
test(
|
||||
"the feature flag is not enabled",
|
||||
toolcacheInputFallbackMacro,
|
||||
[],
|
||||
{ GITHUB_EVENT_NAME: "dynamic" },
|
||||
[],
|
||||
[`Ignoring 'tools: toolcache' because the feature is not enabled.`],
|
||||
);
|
||||
|
||||
test('tryGetTagNameFromUrl extracts the right tag name for a repo name containing "codeql-bundle"', (t) => {
|
||||
t.is(
|
||||
setupCodeql.tryGetTagNameFromUrl(
|
||||
@@ -263,3 +431,15 @@ test('tryGetTagNameFromUrl extracts the right tag name for a repo name containin
|
||||
"codeql-bundle-v2.19.0",
|
||||
);
|
||||
});
|
||||
|
||||
test("getLatestToolcacheVersion returns undefined if there are no CodeQL CLIs in the toolcache", (t) => {
|
||||
sinon.stub(toolcache, "findAllVersions").returns([]);
|
||||
t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), undefined);
|
||||
});
|
||||
|
||||
test("getLatestToolcacheVersion returns latest version in the toolcache", (t) => {
|
||||
const testVersions = ["2.3.1", "3.2.1", "1.2.3"];
|
||||
sinon.stub(toolcache, "findAllVersions").returns(testVersions);
|
||||
|
||||
t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), "3.2.1");
|
||||
});
|
||||
|
||||
@@ -7,12 +7,14 @@ import { default as deepEqual } from "fast-deep-equal";
|
||||
import * as semver from "semver";
|
||||
import { v4 as uuidV4 } from "uuid";
|
||||
|
||||
import { isRunningLocalAction } from "./actions-util";
|
||||
import { isDynamicWorkflow, isRunningLocalAction } from "./actions-util";
|
||||
import * as api from "./api-client";
|
||||
import * as defaults from "./defaults.json";
|
||||
import {
|
||||
CODEQL_VERSION_ZSTD_BUNDLE,
|
||||
CodeQLDefaultVersionInfo,
|
||||
Feature,
|
||||
FeatureEnablement,
|
||||
} from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
import * as tar from "./tar";
|
||||
@@ -38,6 +40,7 @@ const CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
|
||||
|
||||
const CODEQL_BUNDLE_VERSION_ALIAS: string[] = ["linked", "latest"];
|
||||
const CODEQL_NIGHTLY_TOOLS_INPUTS = ["nightly", "nightly-latest"];
|
||||
const CODEQL_TOOLCACHE_INPUT = "toolcache";
|
||||
|
||||
function getCodeQLBundleExtension(
|
||||
compressionMethod: tar.CompressionMethod,
|
||||
@@ -165,7 +168,7 @@ export function tryGetTagNameFromUrl(
|
||||
// assumes less about the structure of the URL.
|
||||
const match = matches[matches.length - 1];
|
||||
|
||||
if (match === null || match.length !== 2) {
|
||||
if (match?.length !== 2) {
|
||||
logger.debug(
|
||||
`Could not determine tag name for URL ${url}. Matched ${JSON.stringify(
|
||||
match,
|
||||
@@ -275,6 +278,7 @@ export async function getCodeQLSource(
|
||||
apiDetails: api.GitHubApiDetails,
|
||||
variant: util.GitHubVariant,
|
||||
tarSupportsZstd: boolean,
|
||||
features: FeatureEnablement,
|
||||
logger: Logger,
|
||||
): Promise<CodeQLToolsSource> {
|
||||
if (
|
||||
@@ -346,6 +350,54 @@ export async function getCodeQLSource(
|
||||
"`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required.",
|
||||
);
|
||||
}
|
||||
} else if (
|
||||
toolsInput !== undefined &&
|
||||
toolsInput === CODEQL_TOOLCACHE_INPUT
|
||||
) {
|
||||
let latestToolcacheVersion: string | undefined;
|
||||
|
||||
// We only allow `toolsInput === "toolcache"` for `dynamic` events. In general, using `toolsInput === "toolcache"`
|
||||
// can lead to alert wobble and so it shouldn't be used for an analysis where results are intended to be uploaded.
|
||||
// We also allow this in test mode.
|
||||
const allowToolcacheValueFF = await features.getValue(
|
||||
Feature.AllowToolcacheInput,
|
||||
);
|
||||
const allowToolcacheValue =
|
||||
allowToolcacheValueFF && (isDynamicWorkflow() || util.isInTestMode());
|
||||
if (allowToolcacheValue) {
|
||||
// If `toolsInput === "toolcache"`, try to find the latest version of the CLI that's available in the toolcache
|
||||
// and use that. We perform this check here since we can set `cliVersion` directly and don't want to default to
|
||||
// the linked version.
|
||||
logger.info(
|
||||
`Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: ${toolsInput}'.`,
|
||||
);
|
||||
|
||||
latestToolcacheVersion = getLatestToolcacheVersion(logger);
|
||||
if (latestToolcacheVersion) {
|
||||
cliVersion = latestToolcacheVersion;
|
||||
}
|
||||
}
|
||||
|
||||
if (latestToolcacheVersion === undefined) {
|
||||
if (allowToolcacheValue) {
|
||||
logger.info(
|
||||
`Found no CodeQL CLI in the toolcache, ignoring 'tools: ${toolsInput}'...`,
|
||||
);
|
||||
} else {
|
||||
if (allowToolcacheValueFF) {
|
||||
logger.warning(
|
||||
`Ignoring 'tools: ${toolsInput}' because the workflow was not triggered dynamically.`,
|
||||
);
|
||||
} else {
|
||||
logger.info(
|
||||
`Ignoring 'tools: ${toolsInput}' because the feature is not enabled.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
cliVersion = defaultCliVersion.cliVersion;
|
||||
tagName = defaultCliVersion.tagName;
|
||||
}
|
||||
} else if (toolsInput !== undefined) {
|
||||
// If a tools URL was provided, then use that.
|
||||
tagName = tryGetTagNameFromUrl(toolsInput, logger);
|
||||
@@ -696,6 +748,7 @@ export async function setupCodeQLBundle(
|
||||
tempDir: string,
|
||||
variant: util.GitHubVariant,
|
||||
defaultCliVersion: CodeQLDefaultVersionInfo,
|
||||
features: FeatureEnablement,
|
||||
logger: Logger,
|
||||
) {
|
||||
if (!(await util.isBinaryAccessible("tar", logger))) {
|
||||
@@ -711,6 +764,7 @@ export async function setupCodeQLBundle(
|
||||
apiDetails,
|
||||
variant,
|
||||
zstdAvailability.available,
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -816,9 +870,38 @@ async function getNightlyToolsUrl(logger: Logger) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the latest version of the CodeQL CLI that is available in the toolcache, or `undefined`
|
||||
* if no CodeQL CLI is available in the toolcache.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @returns The latest version of the CodeQL CLI that is available in the toolcache, or `undefined` if there is none.
|
||||
*/
|
||||
export function getLatestToolcacheVersion(logger: Logger): string | undefined {
|
||||
const allVersions = toolcache
|
||||
.findAllVersions("CodeQL")
|
||||
.sort((a, b) => semver.compare(b, a));
|
||||
logger.debug(
|
||||
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
|
||||
allVersions,
|
||||
)}.`,
|
||||
);
|
||||
|
||||
if (allVersions.length > 0) {
|
||||
const latestToolcacheVersion = allVersions[0];
|
||||
logger.info(
|
||||
`CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`,
|
||||
);
|
||||
return latestToolcacheVersion;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function isReservedToolsValue(tools: string): boolean {
|
||||
return (
|
||||
CODEQL_BUNDLE_VERSION_ALIAS.includes(tools) ||
|
||||
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools)
|
||||
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(tools) ||
|
||||
tools === CODEQL_TOOLCACHE_INPUT
|
||||
);
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ async function runWrapper() {
|
||||
logger,
|
||||
);
|
||||
|
||||
if ((config && config.debugMode) || core.isDebug()) {
|
||||
if (config?.debugMode || core.isDebug()) {
|
||||
const logFilePath = core.getState("proxy-log-file");
|
||||
logger.info(
|
||||
"Debug mode is on. Uploading proxy log as Actions debugging artifact...",
|
||||
|
||||
@@ -7,11 +7,14 @@ import { pki } from "node-forge";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { getApiDetails, getAuthorizationHeaderFor } from "./api-client";
|
||||
import { Config } from "./config-utils";
|
||||
import { KnownLanguage } from "./languages";
|
||||
import { getActionsLogger, Logger } from "./logging";
|
||||
import {
|
||||
Credential,
|
||||
getCredentials,
|
||||
getDownloadUrl,
|
||||
parseLanguage,
|
||||
UPDATEJOB_PROXY,
|
||||
} from "./start-proxy";
|
||||
import {
|
||||
@@ -98,6 +101,7 @@ interface StartProxyStatus extends StatusReportBase {
|
||||
|
||||
async function sendSuccessStatusReport(
|
||||
startedAt: Date,
|
||||
config: Partial<Config>,
|
||||
registry_types: string[],
|
||||
logger: Logger,
|
||||
) {
|
||||
@@ -105,7 +109,7 @@ async function sendSuccessStatusReport(
|
||||
ActionName.StartProxy,
|
||||
"success",
|
||||
startedAt,
|
||||
undefined,
|
||||
config,
|
||||
await util.checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
@@ -125,6 +129,7 @@ async function runWrapper() {
|
||||
actionsUtil.persistInputs();
|
||||
|
||||
const logger = getActionsLogger();
|
||||
let language: KnownLanguage | undefined;
|
||||
|
||||
try {
|
||||
// Setup logging for the proxy
|
||||
@@ -133,11 +138,13 @@ async function runWrapper() {
|
||||
core.saveState("proxy-log-file", proxyLogFilePath);
|
||||
|
||||
// Get the configuration options
|
||||
const languageInput = actionsUtil.getOptionalInput("language");
|
||||
language = languageInput ? parseLanguage(languageInput) : undefined;
|
||||
const credentials = getCredentials(
|
||||
logger,
|
||||
actionsUtil.getOptionalInput("registry_secrets"),
|
||||
actionsUtil.getOptionalInput("registries_credentials"),
|
||||
actionsUtil.getOptionalInput("language"),
|
||||
language,
|
||||
);
|
||||
|
||||
if (credentials.length === 0) {
|
||||
@@ -165,6 +172,9 @@ async function runWrapper() {
|
||||
// Report success if we have reached this point.
|
||||
await sendSuccessStatusReport(
|
||||
startedAt,
|
||||
{
|
||||
languages: language && [language],
|
||||
},
|
||||
proxyConfig.all_credentials.map((c) => c.type),
|
||||
logger,
|
||||
);
|
||||
@@ -178,7 +188,9 @@ async function runWrapper() {
|
||||
ActionName.StartProxy,
|
||||
getActionsStatus(error),
|
||||
startedAt,
|
||||
undefined,
|
||||
{
|
||||
languages: language && [language],
|
||||
},
|
||||
await util.checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
|
||||
@@ -109,7 +109,7 @@ test("getCredentials filters by language when specified", async (t) => {
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
toEncodedJSON(mixedCredentials),
|
||||
"java",
|
||||
KnownLanguage.java,
|
||||
);
|
||||
t.is(credentials.length, 1);
|
||||
t.is(credentials[0].type, "maven_repository");
|
||||
@@ -120,7 +120,7 @@ test("getCredentials returns all for a language when specified", async (t) => {
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
toEncodedJSON(mixedCredentials),
|
||||
"go",
|
||||
KnownLanguage.go,
|
||||
);
|
||||
t.is(credentials.length, 2);
|
||||
|
||||
|
||||
@@ -79,9 +79,8 @@ export function getCredentials(
|
||||
logger: Logger,
|
||||
registrySecrets: string | undefined,
|
||||
registriesCredentials: string | undefined,
|
||||
languageString: string | undefined,
|
||||
language: KnownLanguage | undefined,
|
||||
): Credential[] {
|
||||
const language = languageString ? parseLanguage(languageString) : undefined;
|
||||
const registryTypeForLanguage = language
|
||||
? LANGUAGE_TO_REGISTRY_TYPE[language]
|
||||
: undefined;
|
||||
|
||||
@@ -92,6 +92,49 @@ test("createStatusReportBase", async (t) => {
|
||||
});
|
||||
});
|
||||
|
||||
test("createStatusReportBase - empty configuration", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupEnvironmentAndStub(tmpDir);
|
||||
|
||||
const statusReport = await createStatusReportBase(
|
||||
ActionName.StartProxy,
|
||||
"success",
|
||||
new Date("May 19, 2023 05:19:00"),
|
||||
{},
|
||||
{ numAvailableBytes: 100, numTotalBytes: 500 },
|
||||
getRunnerLogger(false),
|
||||
);
|
||||
|
||||
if (t.truthy(statusReport)) {
|
||||
t.is(statusReport.action_name, ActionName.StartProxy);
|
||||
t.is(statusReport.status, "success");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("createStatusReportBase - partial configuration", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupEnvironmentAndStub(tmpDir);
|
||||
|
||||
const statusReport = await createStatusReportBase(
|
||||
ActionName.StartProxy,
|
||||
"success",
|
||||
new Date("May 19, 2023 05:19:00"),
|
||||
{
|
||||
languages: ["go"],
|
||||
},
|
||||
{ numAvailableBytes: 100, numTotalBytes: 500 },
|
||||
getRunnerLogger(false),
|
||||
);
|
||||
|
||||
if (t.truthy(statusReport)) {
|
||||
t.is(statusReport.action_name, ActionName.StartProxy);
|
||||
t.is(statusReport.status, "success");
|
||||
t.is(statusReport.languages, "go");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("createStatusReportBase_firstParty", async (t) => {
|
||||
await withTmpDir(async (tmpDir: string) => {
|
||||
setupEnvironmentAndStub(tmpDir);
|
||||
|
||||
@@ -23,7 +23,6 @@ import { getRepositoryNwo } from "./repository";
|
||||
import { ToolsSource } from "./setup-codeql";
|
||||
import {
|
||||
ConfigurationError,
|
||||
isHTTPError,
|
||||
getRequiredEnvParam,
|
||||
getCachedCodeQlVersion,
|
||||
isInTestMode,
|
||||
@@ -33,6 +32,7 @@ import {
|
||||
BuildMode,
|
||||
getErrorMessage,
|
||||
getTestingEnvironment,
|
||||
asHTTPError,
|
||||
} from "./util";
|
||||
|
||||
export enum ActionName {
|
||||
@@ -41,6 +41,7 @@ export enum ActionName {
|
||||
Init = "init",
|
||||
InitPost = "init-post",
|
||||
ResolveEnvironment = "resolve-environment",
|
||||
SetupCodeQL = "setup-codeql",
|
||||
StartProxy = "start-proxy",
|
||||
UploadSarif = "upload-sarif",
|
||||
}
|
||||
@@ -260,7 +261,7 @@ export async function createStatusReportBase(
|
||||
actionName: ActionName,
|
||||
status: ActionStatus,
|
||||
actionStartedAt: Date,
|
||||
config: Config | undefined,
|
||||
config: Partial<Config> | undefined,
|
||||
diskInfo: DiskUsage | undefined,
|
||||
logger: Logger,
|
||||
cause?: string,
|
||||
@@ -299,7 +300,7 @@ export async function createStatusReportBase(
|
||||
action_ref: actionRef,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
action_version: getActionVersion(),
|
||||
analysis_kinds: config?.analysisKinds.join(","),
|
||||
analysis_kinds: config?.analysisKinds?.join(","),
|
||||
analysis_key,
|
||||
build_mode: config?.buildMode,
|
||||
commit_oid: commitOid,
|
||||
@@ -324,7 +325,7 @@ export async function createStatusReportBase(
|
||||
}
|
||||
|
||||
if (config) {
|
||||
statusReport.languages = config.languages.join(",");
|
||||
statusReport.languages = config.languages?.join(",");
|
||||
}
|
||||
|
||||
if (diskInfo) {
|
||||
@@ -386,9 +387,9 @@ export async function createStatusReportBase(
|
||||
}
|
||||
|
||||
const OUT_OF_DATE_MSG =
|
||||
"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
|
||||
"CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`.";
|
||||
const INCOMPATIBLE_MSG =
|
||||
"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
|
||||
"CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`.";
|
||||
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
@@ -428,8 +429,9 @@ export async function sendStatusReport<S extends StatusReportBase>(
|
||||
},
|
||||
);
|
||||
} catch (e) {
|
||||
if (isHTTPError(e)) {
|
||||
switch (e.status) {
|
||||
const httpError = asHTTPError(e);
|
||||
if (httpError !== undefined) {
|
||||
switch (httpError.status) {
|
||||
case 403:
|
||||
if (
|
||||
getWorkflowEventName() === "push" &&
|
||||
@@ -437,16 +439,20 @@ export async function sendStatusReport<S extends StatusReportBase>(
|
||||
) {
|
||||
core.warning(
|
||||
'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
|
||||
"Uploading Code Scanning results requires write access. " +
|
||||
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||
"Uploading CodeQL results requires write access. " +
|
||||
'To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
|
||||
`See ${DocUrl.SCANNING_ON_PUSH} for more information on how to configure these events.`,
|
||||
);
|
||||
} else {
|
||||
core.warning(e.message);
|
||||
core.warning(
|
||||
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
|
||||
"This could be because the Action is running on a pull request from a fork. If not, " +
|
||||
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
|
||||
);
|
||||
}
|
||||
return;
|
||||
case 404:
|
||||
core.warning(e.message);
|
||||
core.warning(httpError.message);
|
||||
return;
|
||||
case 422:
|
||||
// schema incompatibility when reporting status
|
||||
@@ -464,7 +470,7 @@ export async function sendStatusReport<S extends StatusReportBase>(
|
||||
// something else has gone wrong and the request/response will be logged by octokit
|
||||
// it's possible this is a transient error and we should continue scanning
|
||||
core.warning(
|
||||
`An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
|
||||
`An unexpected error occurred when sending a status report: ${getErrorMessage(
|
||||
e,
|
||||
)}`,
|
||||
);
|
||||
@@ -516,6 +522,16 @@ export interface InitWithConfigStatusReport extends InitStatusReport {
|
||||
config_file: string;
|
||||
}
|
||||
|
||||
/** Fields of the init status report populated when the tools source is `download`. */
|
||||
export interface InitToolsDownloadFields {
|
||||
/** Time taken to download the bundle, in milliseconds. */
|
||||
tools_download_duration_ms?: number;
|
||||
/**
|
||||
* Whether the relevant tools dotcom feature flags have been misconfigured.
|
||||
* Only populated if we attempt to determine the default version based on the dotcom feature flags. */
|
||||
tools_feature_flags_valid?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Composes a `InitWithConfigStatusReport` from the given values.
|
||||
*
|
||||
|
||||
@@ -35,14 +35,14 @@ async function getTarVersion(): Promise<TarVersion> {
|
||||
// Return whether this is GNU tar or BSD tar, and the version number
|
||||
if (stdout.includes("GNU tar")) {
|
||||
const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
|
||||
if (!match || !match[1]) {
|
||||
if (!match?.[1]) {
|
||||
throw new Error("Failed to parse output of tar --version.");
|
||||
}
|
||||
|
||||
return { type: "gnu", version: match[1] };
|
||||
} else if (stdout.includes("bsdtar")) {
|
||||
const match = stdout.match(/bsdtar ([0-9.]+)/);
|
||||
if (!match || !match[1]) {
|
||||
if (!match?.[1]) {
|
||||
throw new Error("Failed to parse output of tar --version.");
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import { TextDecoder } from "node:util";
|
||||
import path from "path";
|
||||
|
||||
import * as github from "@actions/github";
|
||||
import { TestFn } from "ava";
|
||||
import { ExecutionContext, TestFn } from "ava";
|
||||
import nock from "nock";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
@@ -180,6 +180,23 @@ export function getRecordingLogger(messages: LoggedMessage[]): Logger {
  };
}

export function checkExpectedLogMessages(
  t: ExecutionContext<any>,
  messages: LoggedMessage[],
  expectedMessages: string[],
) {
  for (const expectedMessage of expectedMessages) {
    t.assert(
      messages.some(
        (msg) =>
          typeof msg.message === "string" &&
          msg.message.includes(expectedMessage),
      ),
      `Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
    );
  }
}
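The new checkExpectedLogMessages helper mirrors the assertion loops that appear inline in the setup-codeql tests earlier in this diff. A usage sketch, assuming an Ava test context `t` and a recorded `loggedMessages` array like the ones above:

// Equivalent to the inline loop over expectedMessages in the toolcache tests.
checkExpectedLogMessages(t, loggedMessages, [
  `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`,
]);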
/** Mock the HTTP request to the feature flags enablement API endpoint. */
|
||||
export function mockFeatureFlagApiEndpoint(
|
||||
responseStatusCode: number,
|
||||
|
||||
@@ -3,13 +3,11 @@ import * as semver from "semver";
|
||||
import type { VersionInfo } from "./codeql";
|
||||
|
||||
export enum ToolsFeature {
|
||||
AnalysisSummaryV2IsDefault = "analysisSummaryV2Default",
|
||||
BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
|
||||
DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
|
||||
ForceOverwrite = "forceOverwrite",
|
||||
IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",
|
||||
PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib",
|
||||
SarifMergeRunsFromEqualCategory = "sarifMergeRunsFromEqualCategory",
|
||||
}
|
||||
|
||||
/**