Mirror of https://github.com/github/codeql-action.git, synced 2025-12-09 01:08:10 +08:00

Compare commits: codeql-bun...copilot/up (186 commits)
Commits (SHA1):
3f1383a26b, 580628ab68, 17062a6896, aab1c2f931, 8afe386a9f, cbbc19b214, d363b1c2bf, e7811794d3, c7a5b09374, cbcb06a3ae, 5fe9434cd2, 8d50be301c, 237497c8f0, 777daa0c71, 74c8748a6f, 34c50c1d29, 4ae68afd84, 52a7bd7b6e, 194ba0ee2d, 53acf0b8aa, ac9aeee226, d49e837b8c, 3d988b275a, 8cc18acfa4, ea5cb4a016, e1c8976a56, 4256e2e2a0, 66459ea37c, 1af9394995, 311fc42780, 284bf9b047, a53e78ee2a, d84f470a9a, 41c0a26213, d4ba404a20, 55895ef678, fe16891f40, 57c7b6afb6, 44aeac1a37, 8b1e55d11e, 20900ee769, ad8ad9829e, 239e305d18, 9c39f0afb0, fcc1377ac6, b5bbb5ab73, 723a9469fd, f9eed03ba2, df9e49e9e8, c9d47e2ee9, 714962e17a, 42f957bb51, 52cec4178d, 55c083790a, 50601762ea, 06fbd897c4, 127851b399, 8d77149e0c, db47d17142, cc17bed958, 91ec0ed58f, 4e0b2cd814, ae78991f55, dd565f3332, fa46f22b12, 4e94bd11f7, 8f11182164, 1d36546c14, 08ada26e6a, b843cbeed0, 1ecd563919, e576807920, ad35676669, d75645b13f, 66759e57b2, cbcae45fff, 710606cc35, f0452d5366, 956c56734d, b4ce335286, b9cd36824e, c4b73722ba, 22d29ca74d, 9625890712, 690d276755, 1c3c8066c3, da64a41e37, 8376af204a, f48b54af10, 40b4cdd21f, e849c567ec, d1b51f05c9, aed27f7231, 8ff870a6c2, 6f0fcbeea7, 89d3359017, d79c0a1339, 5e37670026, def04c1c0e, 12f3cfef09, c2bec36917, 14139c9f77, 596de7f1bc, 899bf2fd1e, 6fbdd5f4e9, 489ed914f1, 42642085de, 4bd7dfe989, ebd514f490, e5f165b8f5, c98d5a9a4f, b7c814cb39, f88cb01694, 3cd3374657, 3934593862, bab3f2b5f5, 9924f476ba, bd5f49c7ca, 02b2c3aafc, aa048acb05, 0c5185d061, 79ed9569a3, 8e53c48f94, 804fc665f9, e6e649a8f3, 40e26468f3, 9b0ac1cc3b, ffed63adb8, bee06ec042, 06f31ec789, 53588c5ad2, 2357c43cad, a3ff966dbf, 6562050a4e, e9daf5bcd9, c13672ee32, f2f52d0d47, 08e53bec85, 519594fe94, 8c324fe288, a6b9514fab, c64c4070cc, d88a5540c3, aa0f6ea898, b03dcd5d9d, 16140ae1a1, 30db5fee08, 9ce56a247f, 2c8f4891d1, d7a8ae5fdd, 0822fb12e7, 913cd47984, 4f14649ced, ac922ab562, 66df0bc515, 70205d3d12, 697c209bfc, 1bd53ba38c, cac4df0c79, 77e5c0d0a2, 97a4f751be, 2d5512b361, fa7bdf0559, 57c7b0a884, 4874f90a8d, 5a9e92afca, 9bd9b03572, 3569065d7e, c0e8887d5a, 3c8d00aea0, bc93b04b0c, adf39dd33f, 000295122d, 2611d033d7, ee753b4724, db6938a4d0, d02f50ee62, f4237b7e76, 302fc5e00d, c77b3fb96e, 2a54ab5016, 2ade8a09a3, a60e5ce8ec, 8d0251c1f7, 80220dcd46, e72fd9acb1
.github/sizeup.yml (vendored; new file, 55 lines)
@@ -0,0 +1,55 @@
labeling:
  applyCategoryLabels: true
  categoryLabelPrefix: "size/"

commenting:
  addCommentWhenScoreThresholdHasBeenExceeded: false

sizeup:
  categories:
    - name: extra small
      lte: 25
      label:
        name: XS
        description: Should be very easy to review
        color: 3cbf00
    - name: small
      lte: 100
      label:
        name: S
        description: Should be easy to review
        color: 5d9801
    - name: medium
      lte: 250
      label:
        name: M
        description: Should be of average difficulty to review
        color: 7f7203
    - name: large
      lte: 500
      label:
        name: L
        description: May be hard to review
        color: a14c05
    - name: extra large
      lte: 1000
      label:
        name: XL
        description: May be very hard to review
        color: c32607
    - name: extra extra large
      label:
        name: XXL
        description: May be extremely hard to review
        color: e50009
  ignoredFilePatterns:
    - ".github/workflows/__*"
    - "lib/**/*"
    - "package-lock.json"
  testFilePatterns:
    - "**/*.test.ts"
  scoring:
    # This formula and the aliases below it are written in prefix notation.
    # For an explanation of how this works, please see:
    # https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
    formula: "- - + additions deletions comments whitespace"
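As I read the prefix notation above, the effective score is `(additions + deletions) - comments - whitespace`, which is then matched against the `lte` thresholds of the categories. A hedged, illustrative example (the numbers are invented, not taken from this repository):

```yaml
# Illustrative only: a PR with 120 additions, 30 deletions, 10 comment-only
# lines and 5 whitespace-only lines would score ((120 + 30) - 10) - 5 = 135.
# That is above the "small" threshold (lte: 100) but within "medium"
# (lte: 250), so with the "size/" prefix configured above it would be
# labeled roughly as size/M.
```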
.github/workflows/__analyze-ref-input.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -70,6 +80,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
.github/workflows/__bundle-from-toolcache.yml (generated, vendored; 5 changed lines)
@@ -67,10 +67,9 @@ jobs:
            if (allCodeqlVersions.length === 0) {
              throw new Error(`CodeQL could not be found in the toolcache`);
            }
-      - id: init
-        uses: ./../action/init
+      - id: setup-codeql
+        uses: ./../action/setup-codeql
        with:
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}
      - name: Check CodeQL is installed within the toolcache
        uses: actions/github-script@v8
.github/workflows/__bundle-zstd.yml (generated, vendored; 2 changed lines)
@@ -79,7 +79,7 @@ jobs:
          output: ${{ runner.temp }}/results
          upload-database: false
      - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-zstd-bundle.sarif
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__cleanup-db-cluster-dir.yml (generated, vendored; 4 changed lines)
@@ -36,7 +36,7 @@ jobs:
      fail-fast: false
      matrix:
        include:
-          - os: ubuntu-latest
+          - os: ubuntu-slim
            version: linked
    name: Clean up database cluster directory
    if: github.triggering_actor != 'dependabot[bot]'
@@ -54,7 +54,7 @@ jobs:
        with:
          version: ${{ matrix.version }}
          use-all-platform-bundle: 'false'
-          setup-kotlin: 'true'
+          setup-kotlin: 'false'
      - name: Add a file to the database cluster directory
        run: |
          mkdir -p "${{ runner.temp }}/customDbLocation/javascript"
.github/workflows/__config-export.yml (generated, vendored; 2 changed lines)
@@ -67,7 +67,7 @@ jobs:
          output: ${{ runner.temp }}/results
          upload-database: false
      - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__config-input.yml (generated, vendored; 6 changed lines)
@@ -36,7 +36,7 @@ jobs:
      fail-fast: false
      matrix:
        include:
-          - os: ubuntu-latest
+          - os: ubuntu-slim
            version: linked
    name: Config input
    if: github.triggering_actor != 'dependabot[bot]'
@@ -49,7 +49,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: npm
@@ -61,7 +61,7 @@ jobs:
        with:
          version: ${{ matrix.version }}
          use-all-platform-bundle: 'false'
-          setup-kotlin: 'true'
+          setup-kotlin: 'false'
      - name: Copy queries into workspace
        run: |
          cp -a ../action/queries .
.github/workflows/__diagnostics-export.yml (generated, vendored; 2 changed lines)
@@ -78,7 +78,7 @@ jobs:
          output: ${{ runner.temp }}/results
          upload-database: false
      - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__export-file-baseline-information.yml (generated, vendored; 2 changed lines)
@@ -85,7 +85,7 @@ jobs:
        with:
          output: ${{ runner.temp }}/results
      - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__job-run-uuid-sarif.yml (generated, vendored; 2 changed lines)
@@ -64,7 +64,7 @@ jobs:
        with:
          output: ${{ runner.temp }}/results
      - name: Upload SARIF
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
.github/workflows/__language-aliases.yml (generated, vendored; 4 changed lines)
@@ -36,7 +36,7 @@ jobs:
      fail-fast: false
      matrix:
        include:
-          - os: ubuntu-latest
+          - os: ubuntu-slim
            version: linked
    name: Language aliases
    if: github.triggering_actor != 'dependabot[bot]'
@@ -54,7 +54,7 @@ jobs:
        with:
          version: ${{ matrix.version }}
          use-all-platform-bundle: 'false'
-          setup-kotlin: 'true'
+          setup-kotlin: 'false'
      - uses: ./../action/init
        with:
          languages: C#,java-kotlin,swift,typescript
.github/workflows/__local-bundle.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -70,6 +80,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - name: Fetch latest CodeQL bundle
        run: |
          wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
.github/workflows/__multi-language-autodetect.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -104,6 +114,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - name: Use Xcode 16
        if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
        run: sudo xcode-select -s "/Applications/Xcode_16.app"
.github/workflows/__packaging-codescanning-config-inputs-js.yml (generated, vendored; 17 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -63,7 +73,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: npm
@@ -81,6 +91,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          config-file: .github/codeql/codeql-config-packaging3.yml
.github/workflows/__packaging-config-inputs-js.yml (generated, vendored; 2 changed lines)
@@ -63,7 +63,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: npm
.github/workflows/__packaging-config-js.yml (generated, vendored; 2 changed lines)
@@ -63,7 +63,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: npm
.github/workflows/__packaging-inputs-js.yml (generated, vendored; 2 changed lines)
@@ -63,7 +63,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 20.x
          cache: npm
.github/workflows/__quality-queries.yml (generated, vendored; 13 changed lines)
@@ -80,9 +80,10 @@ jobs:
        with:
          output: ${{ runner.temp }}/results
          upload-database: false
+          post-processed-sarif-path: ${{ runner.temp }}/post-processed
      - name: Upload security SARIF
        if: contains(matrix.analysis-kinds, 'code-scanning')
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: |
            quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -90,12 +91,20 @@ jobs:
          retention-days: 7
      - name: Upload quality SARIF
        if: contains(matrix.analysis-kinds, 'code-quality')
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v5
        with:
          name: |
            quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
          path: ${{ runner.temp }}/results/javascript.quality.sarif
          retention-days: 7
+      - name: Upload post-processed SARIF
+        uses: actions/upload-artifact@v5
+        with:
+          name: |
+            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+          path: ${{ runner.temp }}/post-processed
+          retention-days: 7
+          if-no-files-found: error
      - name: Check quality query does not appear in security SARIF
        if: contains(matrix.analysis-kinds, 'code-scanning')
        uses: actions/github-script@v8
.github/workflows/__remote-config.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -72,6 +82,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
.github/workflows/__rubocop-multi-language.yml (generated, vendored; 2 changed lines)
@@ -56,7 +56,7 @@ jobs:
          use-all-platform-bundle: 'false'
          setup-kotlin: 'true'
      - name: Set up Ruby
-        uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
+        uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
        with:
          ruby-version: 2.6
      - name: Install Code Scanning integration
.github/workflows/__unset-environment.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -72,6 +82,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        id: init
        with:
.github/workflows/__upload-ref-sha-input.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -70,6 +80,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
.github/workflows/__upload-sarif.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -77,6 +87,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
.github/workflows/__with-checkout-path.yml (generated, vendored; 15 changed lines)
@@ -27,6 +27,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
  workflow_call:
    inputs:
      go-version:
@@ -34,6 +39,11 @@ on:
      description: The version of Go to install
      required: false
      default: '>=1.21.0'
+    python-version:
+      type: string
+      description: The version of Python to install
+      required: false
+      default: '3.13'
defaults:
  run:
    shell: bash
@@ -70,6 +80,11 @@ jobs:
        with:
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false
+      - name: Install Python
+        if: matrix.version != 'nightly-latest'
+        uses: actions/setup-python@v6
+        with:
+          python-version: ${{ inputs.python-version || '3.13' }}
      - name: Delete original checkout
        run: |
          # delete the original checkout so we don't accidentally use it.
@@ -15,7 +15,7 @@ defaults:

jobs:
  check-expected-release-files:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-slim

    permissions:
      contents: read

@@ -56,7 +56,7 @@ jobs:
        uses: actions/checkout@v5

      - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 24
          cache: 'npm'

@@ -79,7 +79,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Download all artifacts
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
      - name: Check expected artifacts exist
        run: |
          LANGUAGES="cpp csharp go java javascript python"
.github/workflows/debug-artifacts-safe.yml (vendored; 2 changed lines)
@@ -73,7 +73,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Download all artifacts
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v6
      - name: Check expected artifacts exist
        run: |
          VERSIONS="stable-v2.20.3 default linked nightly-latest"
.github/workflows/label-pr-size.yml (vendored; new file, 26 lines)
@@ -0,0 +1,26 @@
name: Label PR with size

on:
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - edited
      - ready_for_review

permissions:
  contents: read
  pull-requests: write

jobs:
  sizeup:
    name: Label PR with size
    runs-on: ubuntu-slim

    steps:
      - name: Run sizeup
        uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
        with:
          token: "${{ secrets.GITHUB_TOKEN }}"
          configuration-file-path: ".github/sizeup.yml"
.github/workflows/post-release-mergeback.yml (vendored; 2 changed lines)
@@ -47,7 +47,7 @@ jobs:
      - uses: actions/checkout@v5
        with:
          fetch-depth: 0 # ensure we have all tags and can push commits
-      - uses: actions/setup-node@v5
+      - uses: actions/setup-node@v6

      - name: Update git config
        run: |
.github/workflows/pr-checks.yml (vendored; 2 changed lines)
@@ -35,7 +35,7 @@ jobs:
      - uses: actions/checkout@v5

      - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'npm'
.github/workflows/query-filters.yml (vendored; 2 changed lines)
@@ -32,7 +32,7 @@ jobs:
        uses: actions/checkout@v5

      - name: Install Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 24
          cache: npm
.github/workflows/update-bundle.yml (vendored; 2 changed lines)
@@ -41,7 +41,7 @@ jobs:
          git config --global user.name "github-actions[bot]"

      - name: Set up Node.js
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
        with:
          node-version: 24
          cache: 'npm'
CHANGELOG.md (14 changed lines)
@@ -6,6 +6,20 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
No user facing changes.

+## 4.31.1 - 30 Oct 2025
+
+- The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.
+
+## 4.31.0 - 24 Oct 2025
+
+- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
+- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
+
+## 4.30.9 - 17 Oct 2025
+
+- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
+- Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)
+
## 4.30.8 - 10 Oct 2025

No user facing changes.
@@ -34,6 +34,7 @@ Actions with special purposes and unlikely to be used directly:
- `autobuild`: Attempts to automatically build the code. Only used for analyzing languages that require a build. Use the `build-mode: autobuild` input in the `init` action instead. For information about input parameters, see the [autobuild action definition](https://github.com/github/codeql-action/blob/main/autobuild/action.yml).
- `resolve-environment`: [Experimental] Attempts to infer a build environment suitable for automatic builds. For information about input parameters, see the [resolve-environment action definition](https://github.com/github/codeql-action/blob/main/resolve-environment/action.yml).
- `start-proxy`: [Experimental] Start the HTTP proxy server. Internal use only and will change without notice. For information about input parameters, see the [start-proxy action definition](https://github.com/github/codeql-action/blob/main/start-proxy/action.yml).
+- `setup-codeql`: [Experimental] Similar to `init`, except it only installs the CodeQL CLI and does not initialize a database.

### Workflow Permissions
@@ -6,7 +6,7 @@ inputs:
    description: The name of the check run to add text to.
    required: false
  output:
-    description: The path of the directory in which to save the SARIF results
+    description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
    required: false
    default: "../results"
  upload:
@@ -32,14 +32,10 @@ inputs:
      and 13GB for macOS).
    required: false
  add-snippets:
-    description: Specify whether or not to add code snippets to the output sarif file.
+    description: Does not have any effect.
    required: false
-    default: "false"
    deprecationMessage: >-
-      The input "add-snippets" is deprecated and will be removed on the first release in August 2025.
-      When this input is set to true it is expected to add code snippets with an alert to the SARIF file.
-      However, since Code Scanning ignores code snippets provided as part of a SARIF file this is currently
-      a no operation. No alternative is available.
+      The input "add-snippets" has been removed and no longer has any effect.
  skip-queries:
    description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
    required: false
@@ -70,6 +66,12 @@ inputs:
    description: Whether to upload the resulting CodeQL database
    required: false
    default: "true"
+  post-processed-sarif-path:
+    description: >-
+      Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
+      on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
+      argument for this input, they are written to the specified directory.
+    required: false
  wait-for-processing:
    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
    required: true
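A hedged sketch of how the new `post-processed-sarif-path` input might be used from a workflow, based only on the input description above (the step layout and artifact name are illustrative, not taken from this repository):

```yaml
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4
        with:
          output: ${{ runner.temp }}/results
          # Assumption: any directory writable by the runner should work here.
          post-processed-sarif-path: ${{ runner.temp }}/post-processed
      - name: Upload post-processed SARIF for inspection
        uses: actions/upload-artifact@v5
        with:
          name: post-processed-sarif   # illustrative artifact name
          path: ${{ runner.temp }}/post-processed
```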
@@ -131,6 +131,7 @@ export default [
      "no-sequences": "error",
      "no-shadow": "off",
      "@typescript-eslint/no-shadow": "error",
+      "@typescript-eslint/prefer-optional-chain": "error",
      "one-var": ["error", "never"],
    },
  },
lib/analyze-action-post.js (generated; 9153 changed lines): diff suppressed because one or more lines are too long
lib/analyze-action.js (generated; 12260 changed lines): diff suppressed because it is too large
lib/autobuild-action.js (generated; 2752 changed lines): diff suppressed because it is too large

@@ -1,6 +1,6 @@
{
-  "bundleVersion": "codeql-bundle-v2.23.2",
-  "cliVersion": "2.23.2",
-  "priorBundleVersion": "codeql-bundle-v2.23.1",
-  "priorCliVersion": "2.23.1"
+  "bundleVersion": "codeql-bundle-v2.23.3",
+  "cliVersion": "2.23.3",
+  "priorBundleVersion": "codeql-bundle-v2.23.2",
+  "priorCliVersion": "2.23.2"
}

lib/init-action-post.js (generated; 19603 changed lines): diff suppressed because one or more lines are too long
lib/init-action.js (generated; 10735 changed lines): diff suppressed because it is too large
lib/resolve-environment-action.js (generated; 2749 changed lines): diff suppressed because it is too large
lib/setup-codeql-action.js (generated; new file, 82174 lines): diff suppressed because one or more lines are too long
lib/start-proxy-action-post.js (generated; 9086 changed lines): diff suppressed because one or more lines are too long
lib/start-proxy-action.js (generated; 3229 changed lines): diff suppressed because it is too large
lib/upload-lib.js (generated; 8817 changed lines): diff suppressed because it is too large
lib/upload-sarif-action-post.js (generated; 9066 changed lines): diff suppressed because one or more lines are too long
lib/upload-sarif-action.js (generated; 9102 changed lines): diff suppressed because it is too large
package-lock.json (generated; 1451 changed lines): diff suppressed because it is too large
package.json (18 changed lines)
@@ -1,6 +1,6 @@
{
  "name": "codeql",
-  "version": "4.30.9",
+  "version": "4.31.2",
  "private": true,
  "description": "CodeQL action",
  "scripts": {
@@ -24,7 +24,7 @@
  },
  "license": "MIT",
  "dependencies": {
-    "@actions/artifact": "^2.3.1",
+    "@actions/artifact": "^4.0.0",
    "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
    "@actions/cache": "^4.1.0",
    "@actions/core": "^1.11.1",
@@ -38,9 +38,7 @@
    "@octokit/request-error": "^7.0.1",
    "@schemastore/package": "0.0.10",
    "archiver": "^7.0.1",
    "check-disk-space": "^3.4.0",
    "console-log-level": "^1.4.1",
    "del": "^8.0.0",
    "fast-deep-equal": "^3.1.3",
    "follow-redirects": "^1.15.11",
    "get-folder-size": "^5.0.0",
@@ -48,7 +46,7 @@
    "jsonschema": "1.4.1",
    "long": "^5.3.2",
    "node-forge": "^1.3.1",
-    "octokit": "^5.0.3",
+    "octokit": "^5.0.4",
    "semver": "^7.7.3",
    "uuid": "^13.0.0"
  },
@@ -56,10 +54,10 @@
    "@ava/typescript": "6.0.0",
    "@eslint/compat": "^1.4.0",
    "@eslint/eslintrc": "^3.3.1",
-    "@eslint/js": "^9.37.0",
+    "@eslint/js": "^9.38.0",
    "@microsoft/eslint-formatter-sarif": "^3.1.0",
-    "@octokit/types": "^15.0.0",
-    "@types/archiver": "^6.0.3",
+    "@octokit/types": "^15.0.1",
+    "@types/archiver": "^6.0.4",
    "@types/console-log-level": "^1.4.5",
    "@types/follow-redirects": "^1.14.4",
    "@types/js-yaml": "^4.0.9",
@@ -67,10 +65,10 @@
    "@types/node-forge": "^1.3.14",
    "@types/semver": "^7.7.1",
    "@types/sinon": "^17.0.4",
-    "@typescript-eslint/eslint-plugin": "^8.46.0",
+    "@typescript-eslint/eslint-plugin": "^8.46.2",
    "@typescript-eslint/parser": "^8.41.0",
    "ava": "^6.4.1",
-    "esbuild": "^0.25.10",
+    "esbuild": "^0.25.11",
    "eslint": "^8.57.1",
    "eslint-import-resolver-typescript": "^3.8.7",
    "eslint-plugin-filenames": "^1.3.2",
@@ -2,6 +2,7 @@ name: "Analyze: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
+installPython: true
steps:
  - uses: ./../action/init
    with:

@@ -15,10 +15,9 @@ steps:
        if (allCodeqlVersions.length === 0) {
          throw new Error(`CodeQL could not be found in the toolcache`);
        }
-  - id: init
-    uses: ./../action/init
+  - id: setup-codeql
+    uses: ./../action/setup-codeql
    with:
      languages: javascript
      tools: ${{ steps.prepare-test.outputs.tools-url }}
  - name: Check CodeQL is installed within the toolcache
    uses: actions/github-script@v8

@@ -27,7 +27,7 @@ steps:
      output: ${{ runner.temp }}/results
      upload-database: false
  - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: ${{ matrix.os }}-zstd-bundle.sarif
      path: ${{ runner.temp }}/results/javascript.sarif

@@ -1,6 +1,7 @@
name: "Clean up database cluster directory"
description: "The database cluster directory is cleaned up if it is not empty."
versions: ["linked"]
+runnerSize: "slim"
steps:
  - name: Add a file to the database cluster directory
    run: |

@@ -12,7 +12,7 @@ steps:
      output: "${{ runner.temp }}/results"
      upload-database: false
  - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -2,6 +2,7 @@ name: "Config input"
description: "Tests specifying configuration using the config input"
installNode: true
versions: ["linked"]
+runnerSize: "slim"
steps:
  - name: Copy queries into workspace
    run: |

@@ -25,7 +25,7 @@ steps:
      output: "${{ runner.temp }}/results"
      upload-database: false
  - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -17,7 +17,7 @@ steps:
    with:
      output: "${{ runner.temp }}/results"
  - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -11,7 +11,7 @@ steps:
    with:
      output: "${{ runner.temp }}/results"
  - name: Upload SARIF
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"

@@ -1,6 +1,7 @@
name: "Language aliases"
description: "Tests that language aliases are resolved correctly"
versions: ["linked"]
+runnerSize: "slim"
steps:
  - uses: ./../action/init
    with:

@@ -2,6 +2,7 @@ name: "Local CodeQL bundle"
description: "Tests using a CodeQL bundle from a local file rather than a URL"
versions: ["linked"]
installGo: true
+installPython: true
steps:
  - name: Fetch latest CodeQL bundle
    run: |

@@ -4,6 +4,7 @@ operatingSystems: ["macos", "ubuntu"]
env:
  CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
installGo: true
+installPython: true
steps:
  - name: Use Xcode 16
    if: runner.os == 'macOS' && matrix.version != 'nightly-latest'

@@ -3,6 +3,7 @@ description: "Checks that specifying packages using a combination of a config fi
versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs
installGo: true
installNode: true
+installPython: true
steps:
  - uses: ./../action/init
    with:

@@ -36,9 +36,10 @@ steps:
    with:
      output: "${{ runner.temp }}/results"
      upload-database: false
+      post-processed-sarif-path: "${{ runner.temp }}/post-processed"
  - name: Upload security SARIF
    if: contains(matrix.analysis-kinds, 'code-scanning')
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: |
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -46,12 +47,20 @@ steps:
      retention-days: 7
  - name: Upload quality SARIF
    if: contains(matrix.analysis-kinds, 'code-quality')
-    uses: actions/upload-artifact@v4
+    uses: actions/upload-artifact@v5
    with:
      name: |
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
      path: "${{ runner.temp }}/results/javascript.quality.sarif"
      retention-days: 7
+  - name: Upload post-processed SARIF
+    uses: actions/upload-artifact@v5
+    with:
+      name: |
+        post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
+      path: "${{ runner.temp }}/post-processed"
+      retention-days: 7
+      if-no-files-found: error
  - name: Check quality query does not appear in security SARIF
    if: contains(matrix.analysis-kinds, 'code-scanning')
    uses: actions/github-script@v8

@@ -6,6 +6,7 @@ versions:
  - linked
  - nightly-latest
installGo: true
+installPython: true
steps:
  - uses: ./../action/init
    with:

@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
versions: ["default"]
steps:
  - name: Set up Ruby
-    uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
+    uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
    with:
      ruby-version: 2.6
  - name: Install Code Scanning integration

@@ -6,6 +6,7 @@ versions:
  - linked
  - nightly-latest
installGo: true
+installPython: true
steps:
  - uses: ./../action/init
    id: init

@@ -2,6 +2,7 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
+installPython: true
steps:
  - uses: ./../action/init
    with:

@@ -3,6 +3,7 @@ description: "Checks that uploading SARIFs to the code quality endpoint works"
versions: ["default"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
installGo: true
+installPython: true
steps:
  - uses: ./../action/init
    with:

@@ -2,6 +2,7 @@ name: "Use a custom `checkout_path`"
description: "Checks that a custom `checkout_path` will find the proper commit_oid"
versions: ["linked"]
installGo: true
+installPython: true
steps:
  # This ensures we don't accidentally use the original checkout for any part of the test.
  - name: Delete original checkout
@@ -76,22 +76,34 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
    if version == "latest":
        raise ValueError('Did not recognize "version: latest". Did you mean "version: linked"?')

    runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]
    # Determine runner size
    # "default" is used in check specifications and maps to "latest" for the actual runner image
    runnerSize = checkSpecification.get('runnerSize', 'default')
    if runnerSize == 'default':
        runnerSize = 'latest'

    # Build runner images based on runner size and operating systems
    operatingSystems = checkSpecification.get('operatingSystems', ["ubuntu"])

    for operatingSystem in operatingSystems:
        runnerImagesForOs = [image for image in runnerImages if image.startswith(operatingSystem)]

        for runnerImage in runnerImagesForOs:
            matrix.append({
                'os': runnerImage,
                'version': version
            })
        # Construct the runner image name: {os}-{size}
        # Note: Not all OS types may support all runner sizes (e.g., only ubuntu-slim exists as of now)
        runnerImage = f"{operatingSystem}-{runnerSize}"
        matrix.append({
            'os': runnerImage,
            'version': version
        })

    useAllPlatformBundle = "false" # Default to false
    if checkSpecification.get('useAllPlatformBundle'):
        useAllPlatformBundle = checkSpecification['useAllPlatformBundle']

    # Store the runner size for use in prepare-test step
    # This is determined once per check specification
    finalRunnerSize = checkSpecification.get('runnerSize', 'default')
    if finalRunnerSize == 'default':
        finalRunnerSize = 'latest'

@@ -117,7 +129,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
    steps.extend([
        {
            'name': 'Install Node.js',
-            'uses': 'actions/setup-node@v5',
+            'uses': 'actions/setup-node@v6',
            'with': {
                'node-version': '20.x',
                'cache': 'npm',
@@ -136,9 +148,10 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
        'with': {
            'version': '${{ matrix.version }}',
            'use-all-platform-bundle': useAllPlatformBundle,
-            # If the action is being run from a container, then do not setup kotlin.
-            # This is because the kotlin binaries cannot be downloaded from the container.
-            'setup-kotlin': str(not 'container' in checkSpecification).lower(),
+            # If the action is being run from a container or on a slim runner, then do not setup kotlin.
+            # Containers: kotlin binaries cannot be downloaded from the container.
+            # Slim runners: limited resources may cause issues with Kotlin setup.
+            'setup-kotlin': str(not 'container' in checkSpecification and finalRunnerSize != 'slim').lower(),
        }
    })
@@ -184,6 +197,26 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
        }
    })

    installPython = is_truthy(checkSpecification.get('installPython', ''))

    if installPython:
        basePythonVersionExpr = '3.13'
        workflowInputs['python-version'] = {
            'type': 'string',
            'description': 'The version of Python to install',
            'required': False,
            'default': basePythonVersionExpr,
        }

        steps.append({
            'name': 'Install Python',
            'if': 'matrix.version != \'nightly-latest\'',
            'uses': 'actions/setup-python@v6',
            'with': {
                'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
            }
        })

    # If container initialisation steps are present in the check specification,
    # make sure to execute them first.
    if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
@@ -9,9 +9,15 @@ if [ "$GITHUB_ACTIONS" = "true" ]; then
fi

# Check if npm install is likely needed before proceeding
-if [ ! -d node_modules ] || [ package-lock.json -nt node_modules/.package-lock.json ]; then
-  echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated..."
+if [ ! -d node_modules ]; then
+  echo "Running 'npm install' because 'node_modules' directory is missing."
  npm install
+elif [ package.json -nt package-lock.json ]; then
+  echo "Running 'npm install' because 'package-lock.json' appears to be outdated."
+  npm install
+elif [ package-lock.json -nt node_modules/.package-lock.json ]; then
+  echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated."
+  npm install
else
-  echo "Skipping 'npm install' because 'node_modules/.package-lock.json' appears to be up-to-date."
+  echo "Skipping 'npm install' because everything appears to be up-to-date."
fi
setup-codeql/action.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
name: 'CodeQL: Setup'
description: 'Installs the CodeQL CLI'
author: 'GitHub'
inputs:
  tools:
    description: >-
      By default, the Action will use the recommended version of the CodeQL
      Bundle to analyze your project. You can override this choice using this
      input. One of:

      - A local path to a CodeQL Bundle tarball, or
      - The URL of a CodeQL Bundle tarball GitHub release asset, or
      - A special value `linked` which uses the version of the CodeQL tools
        that the Action has been bundled with.
      - A special value `nightly` which uses the latest nightly version of the
        CodeQL tools. Note that this is unstable and not recommended for
        production use.

      If not specified, the Action will check in several places until it finds
      the CodeQL tools.
    required: false
  token:
    description: GitHub token to use for authenticating with this instance of GitHub.
    default: ${{ github.token }}
    required: false
  matrix:
    default: ${{ toJson(matrix) }}
    required: false
  external-repository-token:
    description: A token for fetching additional files from private repositories in the same GitHub instance that is running this action.
    required: false
outputs:
  codeql-path:
    description: The path of the CodeQL binary that was installed.
  codeql-version:
    description: The version of the CodeQL binary that was installed.
runs:
  using: node24
  main: '../lib/setup-codeql-action.js'
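The `setup-codeql` action is experimental and may change at any time, but based on the `action.yml` above, a minimal sketch of a workflow job that installs the CLI and reads the reported outputs might look like this (the job name, step names, and the `@main` ref are illustrative, not prescribed by the repository):

```yaml
jobs:
  install-codeql:
    runs-on: ubuntu-latest
    steps:
      - name: Install CodeQL CLI (experimental)
        id: setup-codeql
        uses: github/codeql-action/setup-codeql@main  # illustrative ref; pin as appropriate
        with:
          tools: linked  # use the CodeQL version the Action was bundled with
      - name: Show installed CodeQL details
        run: |
          echo "CodeQL binary: ${{ steps.setup-codeql.outputs.codeql-path }}"
          echo "CodeQL version: ${{ steps.setup-codeql.outputs.codeql-version }}"
```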
@@ -1,12 +1,19 @@
import test from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import {
  AnalysisKind,
  getAnalysisKinds,
  parseAnalysisKinds,
  supportedAnalysisKinds,
} from "./analyses";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import { ConfigurationError } from "./util";

setupTests(test);

test("All known analysis kinds can be parsed successfully", async (t) => {
  for (const analysisKind of supportedAnalysisKinds) {
    t.deepEqual(await parseAnalysisKinds(analysisKind), [analysisKind]);
@@ -34,3 +41,29 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
    instanceOf: ConfigurationError,
  });
});

test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
  const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
  requiredInputStub
    .withArgs("analysis-kinds")
    .returns("code-scanning,code-quality");
  const result = await getAnalysisKinds(getRunnerLogger(true), true);
  t.assert(result.includes(AnalysisKind.CodeScanning));
  t.assert(result.includes(AnalysisKind.CodeQuality));
});

test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
  const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
  requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
  const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
  optionalInputStub.withArgs("quality-queries").returns("code-quality");
  const result = await getAnalysisKinds(getRunnerLogger(true), true);
  t.assert(result.includes(AnalysisKind.CodeScanning));
  t.assert(result.includes(AnalysisKind.CodeQuality));
});

test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
  const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
  requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
  await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});
@@ -1,4 +1,8 @@
-import { fixCodeQualityCategory } from "./actions-util";
+import {
+  fixCodeQualityCategory,
+  getOptionalInput,
+  getRequiredInput,
+} from "./actions-util";
import { Logger } from "./logging";
import { ConfigurationError } from "./util";

@@ -41,6 +45,55 @@ export async function parseAnalysisKinds(
  );
}

// Used to avoid re-parsing the input after we have done it once.
let cachedAnalysisKinds: AnalysisKind[] | undefined;

/**
 * Initialises the analysis kinds for the analysis based on the `analysis-kinds` input.
 * This function will also use the deprecated `quality-queries` input as an indicator to enable `code-quality`.
 * If the `analysis-kinds` input cannot be parsed, a `ConfigurationError` is thrown.
 *
 * @param logger The logger to use.
 * @param skipCache For testing, whether to ignore the cached values (default: false).
 *
 * @returns The array of enabled analysis kinds.
 * @throws A `ConfigurationError` if the `analysis-kinds` input cannot be parsed.
 */
export async function getAnalysisKinds(
  logger: Logger,
  skipCache: boolean = false,
): Promise<AnalysisKind[]> {
  if (!skipCache && cachedAnalysisKinds !== undefined) {
    return cachedAnalysisKinds;
  }

  cachedAnalysisKinds = await parseAnalysisKinds(
    getRequiredInput("analysis-kinds"),
  );

  // Warn that `quality-queries` is deprecated if there is an argument for it.
  const qualityQueriesInput = getOptionalInput("quality-queries");

  if (qualityQueriesInput !== undefined) {
    logger.warning(
      "The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
        "Use the `analysis-kinds` input to configure different analysis kinds instead.",
    );
  }

  // For backwards compatibility, add Code Quality to the enabled analysis kinds
  // if an input to `quality-queries` was specified. We should remove this once
  // `quality-queries` is no longer used.
  if (
    !cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
    qualityQueriesInput !== undefined
  ) {
    cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
  }

  return cachedAnalysisKinds;
}

/** The queries to use for Code Quality analyses. */
export const codeQualityQueries: string[] = ["code-quality"];
@@ -24,6 +24,9 @@ setupTests(test);
|
||||
// but the first test would fail.
|
||||
|
||||
test("analyze action with RAM & threads from environment variables", async (t) => {
|
||||
// This test frequently times out on Windows with the default timeout, so we bump
|
||||
// it a bit to 20s.
|
||||
t.timeout(1000 * 20);
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
@@ -75,7 +78,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
|
||||
t.assert(runQueriesStub.calledOnce);
|
||||
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -24,6 +24,7 @@ setupTests(test);
|
||||
// but the first test would fail.
|
||||
|
||||
test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
t.timeout(1000 * 20);
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
@@ -75,7 +76,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
|
||||
t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
|
||||
t.assert(runQueriesStub.calledOnce);
|
||||
t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
|
||||
t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -52,6 +52,7 @@ import {
|
||||
} from "./trap-caching";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
import { postProcessAndUploadSarif } from "./upload-sarif";
|
||||
import * as util from "./util";
|
||||
|
||||
interface AnalysisStatusReport
|
||||
@@ -211,7 +212,9 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {
|
||||
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let uploadResult: UploadResult | undefined = undefined;
|
||||
let uploadResults:
|
||||
| Partial<Record<analyses.AnalysisKind, UploadResult>>
|
||||
| undefined = undefined;
|
||||
let runStats: QueriesStatusReport | undefined = undefined;
|
||||
let config: Config | undefined = undefined;
|
||||
let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
|
||||
@@ -321,10 +324,16 @@ async function run() {
|
||||
);
|
||||
|
||||
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
|
||||
// Warn if the removed `add-snippets` input is used.
|
||||
if (actionsUtil.getOptionalInput("add-snippets") !== undefined) {
|
||||
logger.warning(
|
||||
"The `add-snippets` input has been removed and no longer has any effect.",
|
||||
);
|
||||
}
|
||||
|
||||
runStats = await runQueries(
|
||||
outputDir,
|
||||
memory,
|
||||
util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
|
||||
threads,
|
||||
diffRangePackDir,
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
@@ -341,31 +350,67 @@ async function run() {
|
||||
}
|
||||
core.setOutput("db-locations", dbLocations);
|
||||
core.setOutput("sarif-output", path.resolve(outputDir));
|
||||
const uploadInput = actionsUtil.getOptionalInput("upload");
|
||||
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
|
||||
if (isCodeScanningEnabled(config)) {
|
||||
uploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
const uploadKind = actionsUtil.getUploadValue(
|
||||
actionsUtil.getOptionalInput("upload"),
|
||||
);
|
||||
if (runStats) {
|
||||
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
|
||||
const category = actionsUtil.getOptionalInput("category");
|
||||
|
||||
if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
|
||||
uploadResults = await postProcessAndUploadSarif(
|
||||
logger,
|
||||
analyses.CodeScanning,
|
||||
features,
|
||||
uploadKind,
|
||||
checkoutPath,
|
||||
outputDir,
|
||||
category,
|
||||
actionsUtil.getOptionalInput("post-processed-sarif-path"),
|
||||
);
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
} else if (uploadKind === "always") {
|
||||
uploadResults = {};
|
||||
|
||||
if (isCodeScanningEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeScanning,
|
||||
);
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
uploadResults[analyses.AnalysisKind.CodeQuality] =
|
||||
await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeQuality,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
uploadResults = {};
|
||||
logger.info("Not uploading results");
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = analyses.CodeQuality;
|
||||
const qualityUploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
analysis,
|
||||
// Set the SARIF id outputs only if we have results for them, to avoid
|
||||
// having keys with empty values in the action output.
|
||||
if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
|
||||
core.setOutput(
|
||||
"sarif-id",
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
|
||||
);
|
||||
}
|
||||
if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
|
||||
core.setOutput(
|
||||
"quality-sarif-id",
|
||||
uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
|
||||
);
|
||||
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
} else {
|
||||
logger.info("Not uploading results");
|
||||
@@ -408,12 +453,12 @@ async function run() {
|
||||
if (util.isInTestMode()) {
|
||||
logger.debug("In test mode. Waiting for processing is disabled.");
|
||||
} else if (
|
||||
uploadResult !== undefined &&
|
||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
|
||||
actionsUtil.getRequiredInput("wait-for-processing") === "true"
|
||||
) {
|
||||
await uploadLib.waitForProcessing(
|
||||
getRepositoryNwo(),
|
||||
uploadResult.sarifID,
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
|
||||
getActionsLogger(),
|
||||
);
|
||||
}
|
||||
@@ -450,13 +495,16 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
|
||||
if (runStats && uploadResult) {
|
||||
if (
|
||||
runStats !== undefined &&
|
||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
|
||||
) {
|
||||
await sendStatusReport(
|
||||
startedAt,
|
||||
config,
|
||||
{
|
||||
...runStats,
|
||||
...uploadResult.statusReport,
|
||||
...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
|
||||
},
|
||||
undefined,
|
||||
trapCacheUploadTime,
|
||||
@@ -466,7 +514,7 @@ async function run() {
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else if (runStats) {
|
||||
} else if (runStats !== undefined) {
|
||||
await sendStatusReport(
|
||||
startedAt,
|
||||
config,
|
||||
|
||||
@@ -4,10 +4,8 @@ import * as path from "path";
|
||||
import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { CodeQuality, CodeScanning } from "./analyses";
|
||||
import {
|
||||
exportedForTesting,
|
||||
runQueries,
|
||||
defaultSuites,
|
||||
resolveQuerySuiteAlias,
|
||||
@@ -39,7 +37,6 @@ test("status report fields", async (t) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
const memoryFlag = "";
|
||||
const addSnippetsFlag = "";
|
||||
const threadsFlag = "";
|
||||
sinon.stub(uploadLib, "validateSarifFileSchema");
|
||||
|
||||
@@ -105,7 +102,6 @@ test("status report fields", async (t) => {
|
||||
const statusReport = await runQueries(
|
||||
tmpDir,
|
||||
memoryFlag,
|
||||
addSnippetsFlag,
|
||||
threadsFlag,
|
||||
undefined,
|
||||
undefined,
|
||||
@@ -131,204 +127,6 @@ test("status report fields", async (t) => {
|
||||
});
|
||||
});
|
||||
|
||||
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("checkout_path")
|
||||
.returns("/checkout/path");
|
||||
return exportedForTesting.getDiffRanges(
|
||||
{
|
||||
filename: "test.txt",
|
||||
changes,
|
||||
patch: patch?.join("\n"),
|
||||
},
|
||||
getRunnerLogger(true),
|
||||
);
|
||||
}
|
||||
|
||||
test("getDiffRanges: file unchanged", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(0, undefined);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: file diff too large", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(1000000, undefined);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 0,
|
||||
endLine: 0,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single addition range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,8 +50,6 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"-2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, []);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with single update range", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,9 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
" c",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 53,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 55,
|
||||
endLine: 55,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,7 +50,7 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"-1",
|
||||
" d",
|
||||
"-2",
|
||||
"+3",
|
||||
" e",
|
||||
" f",
|
||||
"+4",
|
||||
"+5",
|
||||
" g",
|
||||
" h",
|
||||
" i",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 54,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 57,
|
||||
endLine: 58,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: multiple diff thunks", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, [
|
||||
"@@ -30,6 +50,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
"@@ -130,6 +150,8 @@",
|
||||
" a",
|
||||
" b",
|
||||
" c",
|
||||
"+1",
|
||||
"+2",
|
||||
" d",
|
||||
" e",
|
||||
" f",
|
||||
]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 53,
|
||||
endLine: 54,
|
||||
},
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 153,
|
||||
endLine: 154,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: no diff context lines", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, [
|
||||
{
|
||||
path: "/checkout/path/test.txt",
|
||||
startLine: 50,
|
||||
endLine: 51,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test("getDiffRanges: malformed thunk header", async (t) => {
|
||||
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
|
||||
t.deepEqual(diffRanges, undefined);
|
||||
});
|
||||
|
||||
test("resolveQuerySuiteAlias", (t) => {
|
||||
// default query suite names should resolve to something language-specific ending in `.qls`.
|
||||
for (const suite of defaultSuites) {
|
||||
|
||||
197
src/analyze.ts
197
src/analyze.ts
@@ -3,16 +3,10 @@ import * as path from "path";
|
||||
import { performance } from "perf_hooks";
|
||||
|
||||
import * as io from "@actions/io";
|
||||
import * as del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import {
|
||||
getRequiredInput,
|
||||
getTemporaryDirectory,
|
||||
PullRequestBranches,
|
||||
} from "./actions-util";
|
||||
import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
|
||||
import * as analyses from "./analyses";
|
||||
import { getApiClient } from "./api-client";
|
||||
import { setupCppAutobuild } from "./autobuild";
|
||||
import { type CodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
@@ -21,13 +15,13 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics";
|
||||
import {
|
||||
DiffThunkRange,
|
||||
writeDiffRangesJsonFile,
|
||||
getPullRequestEditedDiffRanges,
|
||||
} from "./diff-informed-analysis-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { FeatureEnablement, Feature } from "./feature-flags";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
import { Logger, withGroupAsync } from "./logging";
|
||||
import { OverlayDatabaseMode } from "./overlay-database-utils";
|
||||
import { getRepositoryNwoFromEnv } from "./repository";
|
||||
import { DatabaseCreationTimings, EventReport } from "./status-report";
|
||||
import { endTracingForCluster } from "./tracer-config";
|
||||
import * as util from "./util";
|
||||
@@ -313,185 +307,6 @@ export async function setupDiffInformedQueryRun(
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the file line ranges that were added or modified in the pull request.
|
||||
*
|
||||
* @param branches The base and head branches of the pull request.
|
||||
* @param logger
|
||||
* @returns An array of tuples, where each tuple contains the absolute path of a
|
||||
* file, the start line and the end line (both 1-based and inclusive) of an
|
||||
* added or modified range in that file. Returns `undefined` if the action was
|
||||
* not triggered by a pull request or if there was an error.
|
||||
*/
|
||||
async function getPullRequestEditedDiffRanges(
|
||||
branches: PullRequestBranches,
|
||||
logger: Logger,
|
||||
): Promise<DiffThunkRange[] | undefined> {
|
||||
const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
|
||||
if (fileDiffs === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (fileDiffs.length >= 300) {
|
||||
// The "compare two commits" API returns a maximum of 300 changed files. If
|
||||
// we see that many changed files, it is possible that there could be more,
|
||||
// with the rest being truncated. In this case, we should not attempt to
|
||||
// compute the diff ranges, as the result would be incomplete.
|
||||
logger.warning(
|
||||
`Cannot retrieve the full diff because there are too many ` +
|
||||
`(${fileDiffs.length}) changed files in the pull request.`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
const results: DiffThunkRange[] = [];
|
||||
for (const filediff of fileDiffs) {
|
||||
const diffRanges = getDiffRanges(filediff, logger);
|
||||
if (diffRanges === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
results.push(...diffRanges);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* This interface is an abbreviated version of the file diff object returned by
|
||||
* the GitHub API.
|
||||
*/
|
||||
interface FileDiff {
|
||||
filename: string;
|
||||
changes: number;
|
||||
// A patch may be absent if the file is binary, if the file diff is too large,
|
||||
// or if the file is unchanged.
|
||||
patch?: string | undefined;
|
||||
}
|
||||
|
||||
async function getFileDiffsWithBasehead(
|
||||
branches: PullRequestBranches,
|
||||
logger: Logger,
|
||||
): Promise<FileDiff[] | undefined> {
|
||||
// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
|
||||
// to GITHUB_REPOSITORY.
|
||||
const repositoryNwo = getRepositoryNwoFromEnv(
|
||||
"CODE_SCANNING_REPOSITORY",
|
||||
"GITHUB_REPOSITORY",
|
||||
);
|
||||
const basehead = `${branches.base}...${branches.head}`;
|
||||
try {
|
||||
const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
basehead,
|
||||
per_page: 1,
|
||||
},
|
||||
);
|
||||
logger.debug(
|
||||
`Response from compareCommitsWithBasehead(${basehead}):` +
|
||||
`\n${JSON.stringify(response, null, 2)}`,
|
||||
);
|
||||
return response.data.files;
|
||||
} catch (error: any) {
|
||||
if (error.status) {
|
||||
logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
|
||||
logger.debug(
|
||||
`Error running compareCommitsWithBasehead(${basehead}):` +
|
||||
`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
|
||||
`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
|
||||
);
|
||||
return undefined;
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getDiffRanges(
|
||||
fileDiff: FileDiff,
|
||||
logger: Logger,
|
||||
): DiffThunkRange[] | undefined {
|
||||
// Diff-informed queries expect the file path to be absolute. CodeQL always
|
||||
// uses forward slashes as the path separator, so on Windows we need to
|
||||
// replace any backslashes with forward slashes.
|
||||
const filename = path
|
||||
.join(getRequiredInput("checkout_path"), fileDiff.filename)
|
||||
.replaceAll(path.sep, "/");
|
||||
|
||||
if (fileDiff.patch === undefined) {
|
||||
if (fileDiff.changes === 0) {
|
||||
// There are situations where a changed file legitimately has no diff.
|
||||
// For example, the file may be a binary file, or that the file may have
|
||||
// been renamed with no changes to its contents. In these cases, the
|
||||
// file would be reported as having 0 changes, and we can return an empty
|
||||
// array to indicate no diff range in this file.
|
||||
return [];
|
||||
}
|
||||
// If a file is reported to have nonzero changes but no patch, that may be
|
||||
// due to the file diff being too large. In this case, we should fall back
|
||||
// to a special diff range that covers the entire file.
|
||||
return [
|
||||
{
|
||||
path: filename,
|
||||
startLine: 0,
|
||||
endLine: 0,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
// The 1-based file line number of the current line
|
||||
let currentLine = 0;
|
||||
// The 1-based file line number that starts the current range of added lines
|
||||
let additionRangeStartLine: number | undefined = undefined;
|
||||
const diffRanges: DiffThunkRange[] = [];
|
||||
|
||||
const diffLines = fileDiff.patch.split("\n");
|
||||
// Adding a fake context line at the end ensures that the following loop will
|
||||
// always terminate the last range of added lines.
|
||||
diffLines.push(" ");
|
||||
|
||||
for (const diffLine of diffLines) {
|
||||
if (diffLine.startsWith("-")) {
|
||||
// Ignore deletions completely -- we do not even want to consider them when
|
||||
// calculating consecutive ranges of added lines.
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith("+")) {
|
||||
if (additionRangeStartLine === undefined) {
|
||||
additionRangeStartLine = currentLine;
|
||||
}
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
if (additionRangeStartLine !== undefined) {
|
||||
// Any line that does not start with a "+" or "-" terminates the current
|
||||
// range of added lines.
|
||||
diffRanges.push({
|
||||
path: filename,
|
||||
startLine: additionRangeStartLine,
|
||||
endLine: currentLine - 1,
|
||||
});
|
||||
additionRangeStartLine = undefined;
|
||||
}
|
||||
if (diffLine.startsWith("@@ ")) {
|
||||
// A new hunk header line resets the current line number.
|
||||
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
|
||||
if (match === null) {
|
||||
logger.warning(
|
||||
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
currentLine = parseInt(match[1], 10);
|
||||
continue;
|
||||
}
|
||||
if (diffLine.startsWith(" ")) {
|
||||
// An unchanged context line advances the current line number.
|
||||
currentLine++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return diffRanges;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an extension pack in the temporary directory that contains the file
|
||||
* line ranges that were added or modified in the pull request.
|
||||
@@ -621,7 +436,6 @@ export function addSarifExtension(
|
||||
export async function runQueries(
|
||||
sarifFolder: string,
|
||||
memoryFlag: string,
|
||||
addSnippetsFlag: string,
|
||||
threadsFlag: string,
|
||||
diffRangePackDir: string | undefined,
|
||||
automationDetailsId: string | undefined,
|
||||
@@ -811,7 +625,6 @@ export async function runQueries(
|
||||
databasePath,
|
||||
queries,
|
||||
sarifFile,
|
||||
addSnippetsFlag,
|
||||
threadsFlag,
|
||||
enableDebugLogging ? "-vv" : "-v",
|
||||
sarifRunPropertyFlag,
|
||||
@@ -855,7 +668,7 @@ export async function runFinalize(
|
||||
logger: Logger,
|
||||
): Promise<DatabaseCreationTimings> {
|
||||
try {
|
||||
await del.deleteAsync(outputDir, { force: true });
|
||||
await fs.promises.rm(outputDir, { force: true, recursive: true });
|
||||
} catch (error: any) {
|
||||
if (error?.code !== "ENOENT") {
|
||||
throw error;
|
||||
@@ -922,7 +735,3 @@ export async function warnIfGoInstalledAfterInit(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const exportedForTesting = {
|
||||
getDiffRanges,
|
||||
};
|
||||
|
||||
@@ -169,4 +169,39 @@ test("wrapApiConfigurationError correctly wraps specific configuration errors",
|
||||
res,
|
||||
new util.ConfigurationError("Resource not accessible by integration"),
|
||||
);
|
||||
|
||||
// Enablement errors.
|
||||
const codeSecurityNotEnabledError = new util.HTTPError(
|
||||
"Code Security must be enabled for this repository to use code scanning",
|
||||
403,
|
||||
);
|
||||
res = api.wrapApiConfigurationError(codeSecurityNotEnabledError);
|
||||
t.deepEqual(
|
||||
res,
|
||||
new util.ConfigurationError(
|
||||
api.getFeatureEnablementError(codeSecurityNotEnabledError.message),
|
||||
),
|
||||
);
|
||||
const advancedSecurityNotEnabledError = new util.HTTPError(
|
||||
"Advanced Security must be enabled for this repository to use code scanning",
|
||||
403,
|
||||
);
|
||||
res = api.wrapApiConfigurationError(advancedSecurityNotEnabledError);
|
||||
t.deepEqual(
|
||||
res,
|
||||
new util.ConfigurationError(
|
||||
api.getFeatureEnablementError(advancedSecurityNotEnabledError.message),
|
||||
),
|
||||
);
|
||||
const codeScanningNotEnabledError = new util.HTTPError(
|
||||
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
|
||||
403,
|
||||
);
|
||||
res = api.wrapApiConfigurationError(codeScanningNotEnabledError);
|
||||
t.deepEqual(
|
||||
res,
|
||||
new util.ConfigurationError(
|
||||
api.getFeatureEnablementError(codeScanningNotEnabledError.message),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
import * as core from "@actions/core";
|
||||
import * as githubUtils from "@actions/github/lib/utils";
|
||||
import * as retry from "@octokit/plugin-retry";
|
||||
import consoleLogLevel from "console-log-level";
|
||||
|
||||
import { getActionVersion, getRequiredInput } from "./actions-util";
|
||||
import { Logger } from "./logging";
|
||||
import { getRepositoryNwo, RepositoryNwo } from "./repository";
|
||||
import {
|
||||
asHTTPError,
|
||||
ConfigurationError,
|
||||
getRequiredEnvParam,
|
||||
GITHUB_DOTCOM_URL,
|
||||
GitHubVariant,
|
||||
GitHubVersion,
|
||||
isHTTPError,
|
||||
parseGitHubUrl,
|
||||
parseMatrixInput,
|
||||
} from "./util";
|
||||
@@ -50,7 +49,12 @@ function createApiClientWithDetails(
|
||||
githubUtils.getOctokitOptions(auth, {
|
||||
baseUrl: apiDetails.apiURL,
|
||||
userAgent: `CodeQL-Action/${getActionVersion()}`,
|
||||
log: consoleLogLevel({ level: "debug" }),
|
||||
log: {
|
||||
debug: core.debug,
|
||||
info: core.info,
|
||||
warn: core.warning,
|
||||
error: core.error,
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
@@ -279,23 +283,49 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
|
||||
});
|
||||
}
|
||||
|
||||
function isEnablementError(msg: string) {
|
||||
return [
|
||||
/Code Security must be enabled/,
|
||||
/Advanced Security must be enabled/,
|
||||
/Code Scanning is not enabled/,
|
||||
].some((pattern) => pattern.test(msg));
|
||||
}
|
||||
|
||||
// TODO: Move to `error-messages.ts` after refactoring import order to avoid cycle
|
||||
// since `error-messages.ts` currently depends on this file.
|
||||
export function getFeatureEnablementError(message: string): string {
|
||||
return `Please verify that the necessary features are enabled: ${message}`;
|
||||
}
|
||||
|
||||
export function wrapApiConfigurationError(e: unknown) {
|
||||
if (isHTTPError(e)) {
|
||||
const httpError = asHTTPError(e);
|
||||
if (httpError !== undefined) {
|
||||
if (
|
||||
e.message.includes("API rate limit exceeded for installation") ||
|
||||
e.message.includes("commit not found") ||
|
||||
e.message.includes("Resource not accessible by integration") ||
|
||||
/ref .* not found in this repository/.test(e.message)
|
||||
[
|
||||
/API rate limit exceeded/,
|
||||
/commit not found/,
|
||||
/Resource not accessible by integration/,
|
||||
/ref .* not found in this repository/,
|
||||
].some((pattern) => pattern.test(httpError.message))
|
||||
) {
|
||||
return new ConfigurationError(e.message);
|
||||
} else if (
|
||||
e.message.includes("Bad credentials") ||
|
||||
e.message.includes("Not Found")
|
||||
return new ConfigurationError(httpError.message);
|
||||
}
|
||||
if (
|
||||
httpError.message.includes("Bad credentials") ||
|
||||
httpError.message.includes("Not Found")
|
||||
) {
|
||||
return new ConfigurationError(
|
||||
"Please check that your token is valid and has the required permissions: contents: read, security-events: write",
|
||||
);
|
||||
}
|
||||
if (httpError.status === 403 && isEnablementError(httpError.message)) {
|
||||
return new ConfigurationError(
|
||||
getFeatureEnablementError(httpError.message),
|
||||
);
|
||||
}
|
||||
if (httpError.status === 429) {
|
||||
return new ConfigurationError("API rate limit exceeded");
|
||||
}
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
@@ -310,6 +310,20 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
|
||||
t.true(wrappedError instanceof ConfigurationError);
|
||||
});
|
||||
|
||||
test("wrapCliConfigurationError - unknown query file", (t) => {
|
||||
const commandError = new CommandInvocationError(
|
||||
"codeql",
|
||||
["database", "init"],
|
||||
2,
|
||||
"my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
|
||||
);
|
||||
const cliError = new CliError(commandError);
|
||||
|
||||
const wrappedError = wrapCliConfigurationError(cliError);
|
||||
|
||||
t.true(wrappedError instanceof ConfigurationError);
|
||||
});
|
||||
|
||||
test("wrapCliConfigurationError - pack missing auth", (t) => {
|
||||
const commandError = new CommandInvocationError(
|
||||
"codeql",
|
||||
|
||||
@@ -264,6 +264,9 @@ export const cliErrorsConfig: Record<
|
||||
new RegExp(
|
||||
"Query pack .* cannot be found\\. Check the spelling of the pack\\.",
|
||||
),
|
||||
new RegExp(
|
||||
"is not a .ql file, .qls file, a directory, or a query pack specification.",
|
||||
),
|
||||
],
|
||||
},
|
||||
[CliConfigErrorCategory.PackMissingAuth]: {
|
||||
|
||||
@@ -5,7 +5,6 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||
import * as io from "@actions/io";
|
||||
import * as toolcache from "@actions/tool-cache";
|
||||
import test, { ExecutionContext } from "ava";
|
||||
import * as del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
import nock from "nock";
|
||||
import * as sinon from "sinon";
|
||||
@@ -36,7 +35,6 @@ import {
|
||||
createTestConfig,
|
||||
} from "./testing-utils";
|
||||
import { ToolsDownloadStatusReport } from "./tools-download";
|
||||
import { ToolsFeature } from "./tools-features";
|
||||
import * as util from "./util";
|
||||
import { initializeEnvironment } from "./util";
|
||||
|
||||
@@ -558,7 +556,7 @@ const injectedConfigMacro = test.macro({
|
||||
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
|
||||
t.deepEqual(augmentedConfig, expectedConfig);
|
||||
|
||||
await del.deleteAsync(configFile, { force: true });
|
||||
await fs.promises.rm(configFile, { force: true });
|
||||
});
|
||||
},
|
||||
|
||||
@@ -870,84 +868,6 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
|
||||
});
|
||||
});
|
||||
|
||||
const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0", {
|
||||
[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
|
||||
}),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
flagPassed: false,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0"),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.DOTCOM,
|
||||
},
|
||||
flagPassed: true,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
{
|
||||
codeqlVersion: makeVersionInfo("2.15.0"),
|
||||
githubVersion: {
|
||||
type: util.GitHubVariant.GHES,
|
||||
version: "3.10.0",
|
||||
},
|
||||
flagPassed: true,
|
||||
negativeFlagPassed: false,
|
||||
},
|
||||
];
|
||||
|
||||
for (const {
|
||||
codeqlVersion,
|
||||
flagPassed,
|
||||
githubVersion,
|
||||
negativeFlagPassed,
|
||||
} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
|
||||
test(`database interpret-results passes ${
|
||||
flagPassed
|
||||
? "--new-analysis-summary"
|
||||
: negativeFlagPassed
|
||||
? "--no-new-analysis-summary"
|
||||
: "nothing"
|
||||
} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
|
||||
util.GitHubVariant[githubVersion.type]
|
||||
} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
|
||||
// io throws because of the test CodeQL object.
|
||||
sinon.stub(io, "which").resolves("");
|
||||
await codeqlObject.databaseInterpretResults(
|
||||
"",
|
||||
[],
|
||||
"",
|
||||
"",
|
||||
"",
|
||||
"-v",
|
||||
undefined,
|
||||
"",
|
||||
Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
|
||||
createFeatures([]),
|
||||
);
|
||||
const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
|
||||
t.is(
|
||||
actualArgs.includes("--new-analysis-summary"),
|
||||
flagPassed,
|
||||
`--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
|
||||
);
|
||||
t.is(
|
||||
actualArgs.includes("--no-new-analysis-summary"),
|
||||
negativeFlagPassed,
|
||||
`--no-new-analysis-summary should${
|
||||
negativeFlagPassed ? "" : "n't"
|
||||
} be passed`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
test("runTool summarizes several fatal errors", async (t) => {
|
||||
const heapError =
|
||||
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
|
||||
@@ -1125,7 +1045,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP
|
||||
);
|
||||
t.truthy(configArg, "Should have injected a codescanning config");
|
||||
const configFile = configArg!.split("=")[1];
|
||||
await del.deleteAsync(configFile, { force: true });
|
||||
await fs.promises.rm(configFile, { force: true });
|
||||
});
|
||||
|
||||
export function stubToolRunnerConstructor(
|
||||
|
||||
@@ -3,7 +3,6 @@ import * as path from "path";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||
import { RequestError } from "@octokit/request-error";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import {
|
||||
@@ -168,7 +167,6 @@ export interface CodeQL {
|
||||
databasePath: string,
|
||||
querySuitePaths: string[] | undefined,
|
||||
sarifFile: string,
|
||||
addSnippetsFlag: string,
|
||||
threadsFlag: string,
|
||||
verbosityFlag: string | undefined,
|
||||
sarifRunPropertyFlag: string | undefined,
|
||||
@@ -268,7 +266,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
|
||||
* The version flags below can be used to conditionally enable certain features
|
||||
* on versions newer than this.
|
||||
*/
|
||||
const CODEQL_MINIMUM_VERSION = "2.16.6";
|
||||
const CODEQL_MINIMUM_VERSION = "2.17.6";
|
||||
|
||||
/**
|
||||
* This version will shortly become the oldest version of CodeQL that the Action will run with.
|
||||
@@ -371,11 +369,11 @@ export async function setupCodeQL(
|
||||
toolsVersion,
|
||||
zstdAvailability,
|
||||
};
|
||||
} catch (e) {
|
||||
} catch (rawError) {
|
||||
const e = api.wrapApiConfigurationError(rawError);
|
||||
const ErrorClass =
|
||||
e instanceof util.ConfigurationError ||
|
||||
(e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
|
||||
(e instanceof RequestError && e.status === 429) // rate limited
|
||||
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
|
||||
? util.ConfigurationError
|
||||
: Error;
|
||||
|
||||
@@ -818,7 +816,6 @@ export async function getCodeQLForCmd(
|
||||
databasePath: string,
|
||||
querySuitePaths: string[] | undefined,
|
||||
sarifFile: string,
|
||||
addSnippetsFlag: string,
|
||||
threadsFlag: string,
|
||||
verbosityFlag: string,
|
||||
sarifRunPropertyFlag: string | undefined,
|
||||
@@ -837,7 +834,6 @@ export async function getCodeQLForCmd(
|
||||
"--format=sarif-latest",
|
||||
verbosityFlag,
|
||||
`--output=${sarifFile}`,
|
||||
addSnippetsFlag,
|
||||
"--print-diagnostics-summary",
|
||||
"--print-metrics-summary",
|
||||
"--sarif-add-baseline-file-info",
|
||||
@@ -861,14 +857,6 @@ export async function getCodeQLForCmd(
|
||||
} else {
|
||||
codeqlArgs.push("--no-sarif-include-diagnostics");
|
||||
}
|
||||
if (
|
||||
!isSupportedToolsFeature(
|
||||
await this.getVersion(),
|
||||
ToolsFeature.AnalysisSummaryV2IsDefault,
|
||||
)
|
||||
) {
|
||||
codeqlArgs.push("--new-analysis-summary");
|
||||
}
|
||||
codeqlArgs.push(databasePath);
|
||||
if (querySuitePaths) {
|
||||
codeqlArgs.push(...querySuitePaths);
|
||||
|
||||
@@ -49,10 +49,9 @@ function createTestInitConfigInputs(
|
||||
return Object.assign(
|
||||
{},
|
||||
{
|
||||
analysisKindsInput: "code-scanning",
|
||||
analysisKinds: [AnalysisKind.CodeScanning],
|
||||
languagesInput: undefined,
|
||||
queriesInput: undefined,
|
||||
qualityQueriesInput: undefined,
|
||||
packsInput: undefined,
|
||||
configFile: undefined,
|
||||
dbLocation: undefined,
|
||||
@@ -149,6 +148,7 @@ test("load empty config", async (t) => {
|
||||
});
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
@@ -188,8 +188,9 @@ test("load code quality config", async (t) => {
|
||||
});
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
analysisKindsInput: "code-quality",
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
tempDir,
|
||||
@@ -272,8 +273,9 @@ test("initActionState doesn't throw if there are queries configured in the repos
|
||||
|
||||
await t.notThrowsAsync(async () => {
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
analysisKindsInput: "code-quality",
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
languagesInput: languages,
|
||||
repository: { owner: "github", repo: "example" },
|
||||
tempDir,
|
||||
@@ -310,6 +312,7 @@ test("loading a saved config produces the same config", async (t) => {
|
||||
t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);
|
||||
|
||||
const config1 = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: "javascript,python",
|
||||
tempDir,
|
||||
@@ -361,6 +364,7 @@ test("loading config with version mismatch throws", async (t) => {
|
||||
.returns("does-not-exist");
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput: "javascript,python",
|
||||
tempDir,
|
||||
@@ -389,6 +393,7 @@ test("load input outside of workspace", async (t) => {
|
||||
return await withTmpDir(async (tempDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: "../input",
|
||||
tempDir,
|
||||
@@ -416,6 +421,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile,
|
||||
tempDir,
|
||||
@@ -444,6 +450,7 @@ test("load non-existent input", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile,
|
||||
@@ -527,6 +534,7 @@ test("load non-empty input", async (t) => {
|
||||
const configFilePath = createConfigFile(inputFileContents, tempDir);
|
||||
|
||||
const actualConfig = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
buildModeInput: "none",
|
||||
@@ -583,6 +591,7 @@ test("Using config input and file together, config input should be used.", async
|
||||
const languagesInput = "javascript";
|
||||
|
||||
const config = await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile: configFilePath,
|
||||
@@ -633,6 +642,7 @@ test("API client used when reading remote config", async (t) => {
|
||||
const languagesInput = "javascript";
|
||||
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
configFile,
|
||||
@@ -653,6 +663,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
@@ -681,6 +692,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
configFile: repoReference,
|
||||
tempDir,
|
||||
@@ -710,6 +722,7 @@ test("No detected languages", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
tempDir,
|
||||
codeql,
|
||||
@@ -732,6 +745,7 @@ test("Unknown languages", async (t) => {
|
||||
|
||||
try {
|
||||
await configUtils.initConfig(
|
||||
createFeatures([]),
|
||||
createTestInitConfigInputs({
|
||||
languagesInput,
|
||||
tempDir,
|
||||
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
CodeQuality,
|
||||
codeQualityQueries,
|
||||
CodeScanning,
|
||||
parseAnalysisKinds,
|
||||
} from "./analyses";
|
||||
import * as api from "./api-client";
|
||||
import { CachingKind, getCachingKind } from "./caching-utils";
|
||||
@@ -20,6 +19,7 @@ import {
|
||||
calculateAugmentation,
|
||||
ExcludeQueryFilter,
|
||||
generateCodeScanningConfig,
|
||||
parseUserConfig,
|
||||
UserConfig,
|
||||
} from "./config/db-config";
|
||||
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
|
||||
@@ -373,10 +373,8 @@ export async function getRawLanguages(
|
||||
|
||||
/** Inputs required to initialize a configuration. */
|
||||
export interface InitConfigInputs {
|
||||
analysisKindsInput: string;
|
||||
languagesInput: string | undefined;
|
||||
queriesInput: string | undefined;
|
||||
qualityQueriesInput: string | undefined;
|
||||
packsInput: string | undefined;
|
||||
configFile: string | undefined;
|
||||
dbLocation: string | undefined;
|
||||
@@ -396,6 +394,7 @@ export interface InitConfigInputs {
|
||||
apiDetails: api.GitHubApiCombinedDetails;
|
||||
features: FeatureEnablement;
|
||||
repositoryProperties: RepositoryProperties;
|
||||
analysisKinds: AnalysisKind[];
|
||||
logger: Logger;
|
||||
}
|
||||
|
||||
@@ -405,10 +404,8 @@ export interface InitConfigInputs {
|
||||
*/
|
||||
export async function initActionState(
|
||||
{
|
||||
analysisKindsInput,
|
||||
languagesInput,
|
||||
queriesInput,
|
||||
qualityQueriesInput,
|
||||
packsInput,
|
||||
buildModeInput,
|
||||
dbLocation,
|
||||
@@ -424,22 +421,11 @@ export async function initActionState(
|
||||
githubVersion,
|
||||
features,
|
||||
repositoryProperties,
|
||||
analysisKinds,
|
||||
logger,
|
||||
}: InitConfigInputs,
|
||||
userConfig: UserConfig,
|
||||
): Promise<Config> {
|
||||
const analysisKinds = await parseAnalysisKinds(analysisKindsInput);
|
||||
|
||||
// For backwards compatibility, add Code Quality to the enabled analysis kinds
|
||||
// if an input to `quality-queries` was specified. We should remove this once
|
||||
// `quality-queries` is no longer used.
|
||||
if (
|
||||
!analysisKinds.includes(AnalysisKind.CodeQuality) &&
|
||||
qualityQueriesInput !== undefined
|
||||
) {
|
||||
analysisKinds.push(AnalysisKind.CodeQuality);
|
||||
}
|
||||
|
||||
const languages = await getLanguages(
|
||||
codeql,
|
||||
languagesInput,
|
||||
@@ -540,10 +526,12 @@ async function downloadCacheWithTime(
|
||||
}
|
||||
|
||||
async function loadUserConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
workspacePath: string,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
tempDir: string,
|
||||
validateConfig: boolean,
|
||||
): Promise<UserConfig> {
|
||||
if (isLocal(configFile)) {
|
||||
if (configFile !== userConfigFromActionPath(tempDir)) {
|
||||
@@ -556,9 +544,14 @@ async function loadUserConfig(
|
||||
);
|
||||
}
|
||||
}
|
||||
return getLocalConfig(configFile);
|
||||
return getLocalConfig(logger, configFile, validateConfig);
|
||||
} else {
|
||||
return await getRemoteConfig(configFile, apiDetails);
|
||||
return await getRemoteConfig(
|
||||
logger,
|
||||
configFile,
|
||||
apiDetails,
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -794,7 +787,10 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
|
||||
export async function initConfig(
|
||||
features: FeatureEnablement,
|
||||
inputs: InitConfigInputs,
|
||||
): Promise<Config> {
|
||||
const { logger, tempDir } = inputs;
|
||||
|
||||
// if configInput is set, it takes precedence over configFile
|
||||
@@ -814,11 +810,14 @@ export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
|
||||
logger.debug("No configuration file was provided");
|
||||
} else {
|
||||
logger.debug(`Using configuration file: ${inputs.configFile}`);
|
||||
const validateConfig = await features.getValue(Feature.ValidateDbConfig);
|
||||
userConfig = await loadUserConfig(
|
||||
logger,
|
||||
inputs.configFile,
|
||||
inputs.workspacePath,
|
||||
inputs.apiDetails,
|
||||
tempDir,
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -912,7 +911,11 @@ function isLocal(configPath: string): boolean {
|
||||
return configPath.indexOf("@") === -1;
|
||||
}
|
||||
|
||||
function getLocalConfig(configFile: string): UserConfig {
|
||||
function getLocalConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
validateConfig: boolean,
|
||||
): UserConfig {
|
||||
// Error if the file does not exist
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new ConfigurationError(
|
||||
@@ -920,12 +923,19 @@ function getLocalConfig(configFile: string): UserConfig {
|
||||
);
|
||||
}
|
||||
|
||||
return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
|
||||
return parseUserConfig(
|
||||
logger,
|
||||
configFile,
|
||||
fs.readFileSync(configFile, "utf-8"),
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
async function getRemoteConfig(
|
||||
logger: Logger,
|
||||
configFile: string,
|
||||
apiDetails: api.GitHubApiCombinedDetails,
|
||||
validateConfig: boolean,
|
||||
): Promise<UserConfig> {
|
||||
// retrieve the various parts of the config location, and ensure they're present
|
||||
const format = new RegExp(
|
||||
@@ -933,7 +943,7 @@ async function getRemoteConfig(
|
||||
);
|
||||
const pieces = format.exec(configFile);
|
||||
// 5 = 4 groups + the whole expression
|
||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||
if (pieces?.groups === undefined || pieces.length < 5) {
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
|
||||
);
|
||||
@@ -961,9 +971,12 @@ async function getRemoteConfig(
|
||||
);
|
||||
}
|
||||
|
||||
return yaml.load(
|
||||
return parseUserConfig(
|
||||
logger,
|
||||
configFile,
|
||||
Buffer.from(fileContents, "base64").toString("binary"),
|
||||
) as UserConfig;
|
||||
validateConfig,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -2,7 +2,13 @@ import test, { ExecutionContext } from "ava";
|
||||
|
||||
import { RepositoryProperties } from "../feature-flags/properties";
|
||||
import { KnownLanguage, Language } from "../languages";
|
||||
import { prettyPrintPack } from "../util";
|
||||
import { getRunnerLogger } from "../logging";
|
||||
import {
|
||||
checkExpectedLogMessages,
|
||||
getRecordingLogger,
|
||||
LoggedMessage,
|
||||
} from "../testing-utils";
|
||||
import { ConfigurationError, prettyPrintPack } from "../util";
|
||||
|
||||
import * as dbConfig from "./db-config";
|
||||
|
||||
@@ -391,3 +397,111 @@ test(
|
||||
{},
|
||||
/"a-pack-without-a-scope" is not a valid pack/,
|
||||
);
|
||||
|
||||
test("parseUserConfig - successfully parses valid YAML", (t) => {
|
||||
const result = dbConfig.parseUserConfig(
|
||||
getRunnerLogger(true),
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries:
|
||||
- uses: foo
|
||||
some-unknown-option: true
|
||||
`,
|
||||
true,
|
||||
);
|
||||
t.truthy(result);
|
||||
if (t.truthy(result["paths-ignore"])) {
|
||||
t.is(result["paths-ignore"].length, 1);
|
||||
t.is(result["paths-ignore"][0], "some/path");
|
||||
}
|
||||
if (t.truthy(result["queries"])) {
|
||||
t.is(result["queries"].length, 1);
|
||||
t.deepEqual(result["queries"][0], { uses: "foo" });
|
||||
}
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
|
||||
t.throws(
|
||||
() =>
|
||||
dbConfig.parseUserConfig(
|
||||
getRunnerLogger(true),
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries:
|
||||
- foo
|
||||
`,
|
||||
true,
|
||||
),
|
||||
{
|
||||
instanceOf: ConfigurationError,
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.notThrows(() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
query-filters:
|
||||
- something
|
||||
- include: foo
|
||||
- exclude: bar
|
||||
`,
|
||||
true,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries: true
|
||||
`,
|
||||
true,
|
||||
),
|
||||
{
|
||||
instanceOf: ConfigurationError,
|
||||
message:
|
||||
'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
|
||||
},
|
||||
);
|
||||
|
||||
const expectedMessages = ["instance.queries is not of a type(s) array"];
|
||||
checkExpectedLogMessages(t, loggedMessages, expectedMessages);
|
||||
});
|
||||
|
||||
test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
|
||||
const loggedMessages: LoggedMessage[] = [];
|
||||
const logger = getRecordingLogger(loggedMessages);
|
||||
|
||||
t.notThrows(() =>
|
||||
dbConfig.parseUserConfig(
|
||||
logger,
|
||||
"test",
|
||||
`
|
||||
paths-ignore:
|
||||
- "some/path"
|
||||
queries: true
|
||||
`,
|
||||
false,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import * as path from "path";
|
||||
|
||||
import * as yaml from "js-yaml";
|
||||
import * as jsonschema from "jsonschema";
|
||||
import * as semver from "semver";
|
||||
|
||||
import * as errorMessages from "../error-messages";
|
||||
@@ -378,10 +380,7 @@ function combineQueries(
|
||||
const result: QuerySpec[] = [];
|
||||
|
||||
// Query settings obtained from the repository properties have the highest precedence.
|
||||
if (
|
||||
augmentationProperties.repoPropertyQueries &&
|
||||
augmentationProperties.repoPropertyQueries.input
|
||||
) {
|
||||
if (augmentationProperties.repoPropertyQueries?.input) {
|
||||
logger.info(
|
||||
`Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
|
||||
`${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,
|
||||
@@ -474,3 +473,53 @@ export function generateCodeScanningConfig(
|
||||
|
||||
return augmentedConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to parse `contents` into a `UserConfig` value.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param pathInput The path to the file where `contents` was obtained from, for use in error messages.
|
||||
* @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
|
||||
* @param validateConfig Whether to validate the configuration file against the schema.
|
||||
* @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
|
||||
* @throws A `ConfigurationError` if parsing failed.
|
||||
*/
|
||||
export function parseUserConfig(
|
||||
logger: Logger,
|
||||
pathInput: string,
|
||||
contents: string,
|
||||
validateConfig: boolean,
|
||||
): UserConfig {
|
||||
try {
|
||||
const schema =
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
require("../../src/db-config-schema.json") as jsonschema.Schema;
|
||||
|
||||
const doc = yaml.load(contents);
|
||||
|
||||
if (validateConfig) {
|
||||
const result = new jsonschema.Validator().validate(doc, schema);
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
for (const error of result.errors) {
|
||||
logger.error(error.stack);
|
||||
}
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getInvalidConfigFileMessage(
|
||||
pathInput,
|
||||
result.errors.map((e) => e.stack),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return doc as UserConfig;
|
||||
} catch (error) {
|
||||
if (error instanceof yaml.YAMLException) {
|
||||
throw new ConfigurationError(
|
||||
errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import test from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { AnalysisKind } from "./analyses";
|
||||
import { GitHubApiDetails } from "./api-client";
|
||||
import * as apiClient from "./api-client";
|
||||
import { createStubCodeQL } from "./codeql";
|
||||
@@ -108,6 +109,39 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
sinon
|
||||
.stub(actionsUtil, "getRequiredInput")
|
||||
.withArgs("upload-database")
|
||||
.returns("true");
|
||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
||||
|
||||
await mockHttpRequests(201);
|
||||
|
||||
const loggedMessages = [];
|
||||
await uploadDatabases(
|
||||
testRepoName,
|
||||
getCodeQL(),
|
||||
{
|
||||
...getTestConfig(tmpDir),
|
||||
analysisKinds: [AnalysisKind.CodeQuality],
|
||||
},
|
||||
testApiDetails,
|
||||
getRecordingLogger(loggedMessages),
|
||||
);
|
||||
t.assert(
|
||||
loggedMessages.find(
|
||||
(v: LoggedMessage) =>
|
||||
v.type === "debug" &&
|
||||
v.message ===
|
||||
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
|
||||
) !== undefined,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test("Abort database upload if running against GHES", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
setupActionsVars(tmpDir, tmpDir);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import * as fs from "fs";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
import { AnalysisKind } from "./analyses";
|
||||
import { getApiClient, GitHubApiDetails } from "./api-client";
|
||||
import { type CodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
@@ -22,6 +23,13 @@ export async function uploadDatabases(
|
||||
return;
|
||||
}
|
||||
|
||||
if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
|
||||
logger.debug(
|
||||
`Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (util.isInTestMode()) {
|
||||
logger.debug("In test mode. Skipping database upload.");
|
||||
return;
|
||||
|
||||
145
src/db-config-schema.json
Normal file
145
src/db-config-schema.json
Normal file
@@ -0,0 +1,145 @@
{
  "$schema": "https://json-schema.org/draft/2020-12/schema",
  "title": "CodeQL Database Configuration",
  "description": "Format of the config file supplied by the user for CodeQL analysis",
  "type": "object",
  "properties": {
    "name": {
      "type": "string",
      "description": "Name of the configuration"
    },
    "disable-default-queries": {
      "type": "boolean",
      "description": "Whether to disable default queries"
    },
    "queries": {
      "type": "array",
      "description": "List of additional queries to run",
      "items": {
        "$ref": "#/definitions/QuerySpec"
      }
    },
    "paths-ignore": {
      "type": "array",
      "description": "Paths to ignore during analysis",
      "items": {
        "type": "string"
      }
    },
    "paths": {
      "type": "array",
      "description": "Paths to include in analysis",
      "items": {
        "type": "string"
      }
    },
    "packs": {
      "description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
      "oneOf": [
        {
          "type": "array",
          "items": {
            "type": "string"
          }
        },
        {
          "type": "object",
          "additionalProperties": {
            "type": "array",
            "items": {
              "type": "string"
            }
          }
        }
      ]
    },
    "query-filters": {
      "type": "array",
      "description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
      "items": {
        "$ref": "#/definitions/QueryFilter"
      }
    }
  },
  "additionalProperties": true,
  "definitions": {
    "QuerySpec": {
      "type": "object",
      "description": "Detailed query specification object",
      "properties": {
        "name": {
          "type": "string",
          "description": "Optional name for the query"
        },
        "uses": {
          "type": "string",
          "description": "The query or query suite to use"
        }
      },
      "required": ["uses"],
      "additionalProperties": false
    },
    "QueryFilter": {
      "description": "Query filter that can either include or exclude queries",
      "oneOf": [
        {
          "$ref": "#/definitions/ExcludeQueryFilter"
        },
        {
          "$ref": "#/definitions/IncludeQueryFilter"
        },
        {}
      ]
    },
    "ExcludeQueryFilter": {
      "type": "object",
      "description": "Filter to exclude queries",
      "properties": {
        "exclude": {
          "type": "object",
          "description": "Queries to exclude",
          "additionalProperties": {
            "oneOf": [
              {
                "type": "array",
                "items": {
                  "type": "string"
                }
              },
              {
                "type": "string"
              }
            ]
          }
        }
      },
      "required": ["exclude"],
      "additionalProperties": false
    },
    "IncludeQueryFilter": {
      "type": "object",
      "description": "Filter to include queries",
      "properties": {
        "include": {
          "type": "object",
          "description": "Queries to include",
          "additionalProperties": {
            "oneOf": [
              {
                "type": "array",
                "items": {
                  "type": "string"
                }
              },
              {
                "type": "string"
              }
            ]
          }
        }
      },
      "required": ["include"],
      "additionalProperties": false
    }
  }
}

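For illustration only (not part of the diff): a minimal sketch of how a user config could be validated against this new schema, mirroring the jsonschema validation code shown earlier in this comparison. The config values and the require path are hypothetical.

import * as yaml from "js-yaml";
import * as jsonschema from "jsonschema";

// The schema added by this diff; the relative path is an assumption for the sketch.
const schema = require("./src/db-config-schema.json") as jsonschema.Schema;

// Hypothetical user config: one extra query suite, an ignored path, and a
// filter that excludes queries by a suite property.
const exampleConfig = yaml.load(`
name: Example configuration
disable-default-queries: false
queries:
  - uses: security-extended
paths-ignore:
  - node_modules
query-filters:
  - exclude:
      problem.severity: warning
`);

const result = new jsonschema.Validator().validate(exampleConfig, schema);
// For a valid config, result.errors is empty; otherwise each error's `stack`
// describes the failing property, which is what the config parsing code logs.
console.log(result.errors.map((e) => e.stack));
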
@@ -5,7 +5,6 @@ import * as artifact from "@actions/artifact";
import * as artifactLegacy from "@actions/artifact-legacy";
import * as core from "@actions/core";
import archiver from "archiver";
-import * as del from "del";

import { getOptionalInput, getTemporaryDirectory } from "./actions-util";
import { dbIsFinalized } from "./analyze";
@@ -345,7 +344,7 @@ async function createPartialDatabaseBundle(
  );
  // See `bundleDb` for explanation behind deleting existing db bundle.
  if (fs.existsSync(databaseBundlePath)) {
-    await del.deleteAsync(databaseBundlePath, { force: true });
+    await fs.promises.rm(databaseBundlePath, { force: true });
  }
  const output = fs.createWriteStream(databaseBundlePath);
  const zip = archiver("zip");

@@ -1,6 +1,6 @@
{
-  "bundleVersion": "codeql-bundle-v2.23.2",
-  "cliVersion": "2.23.2",
-  "priorBundleVersion": "codeql-bundle-v2.23.1",
-  "priorCliVersion": "2.23.1"
+  "bundleVersion": "codeql-bundle-v2.23.3",
+  "cliVersion": "2.23.3",
+  "priorBundleVersion": "codeql-bundle-v2.23.2",
+  "priorCliVersion": "2.23.2"
}

@@ -4,7 +4,10 @@ import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import type { PullRequestBranches } from "./actions-util";
import * as apiClient from "./api-client";
-import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
+import {
+  shouldPerformDiffInformedAnalysis,
+  exportedForTesting,
+} from "./diff-informed-analysis-utils";
import { Feature, Features } from "./feature-flags";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
@@ -183,3 +186,201 @@ test(
  },
  false,
);

function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
  sinon
    .stub(actionsUtil, "getRequiredInput")
    .withArgs("checkout_path")
    .returns("/checkout/path");
  return exportedForTesting.getDiffRanges(
    {
      filename: "test.txt",
      changes,
      patch: patch?.join("\n"),
    },
    getRunnerLogger(true),
  );
}

test("getDiffRanges: file unchanged", async (t) => {
  const diffRanges = runGetDiffRanges(0, undefined);
  t.deepEqual(diffRanges, []);
});

test("getDiffRanges: file diff too large", async (t) => {
  const diffRanges = runGetDiffRanges(1000000, undefined);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 0,
      endLine: 0,
    },
  ]);
});

test("getDiffRanges: diff thunk with single addition range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,6 +50,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 54,
    },
  ]);
});

test("getDiffRanges: diff thunk with single deletion range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,8 +50,6 @@",
    " a",
    " b",
    " c",
    "-1",
    "-2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, []);
});

test("getDiffRanges: diff thunk with single update range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,7 @@",
    " a",
    " b",
    " c",
    "-1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 53,
    },
  ]);
});

test("getDiffRanges: diff thunk with addition ranges", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,9 @@",
    " a",
    " b",
    " c",
    "+1",
    " c",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 53,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 55,
      endLine: 55,
    },
  ]);
});

test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,7 @@",
    " a",
    " b",
    " c",
    "-1",
    " d",
    "-2",
    "+3",
    " e",
    " f",
    "+4",
    "+5",
    " g",
    " h",
    " i",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 54,
      endLine: 54,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 57,
      endLine: 58,
    },
  ]);
});

test("getDiffRanges: multiple diff thunks", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,6 +50,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
    "@@ -130,6 +150,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 54,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 153,
      endLine: 154,
    },
  ]);
});

test("getDiffRanges: no diff context lines", async (t) => {
  const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 50,
      endLine: 51,
    },
  ]);
});

test("getDiffRanges: malformed thunk header", async (t) => {
  const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
  t.deepEqual(diffRanges, undefined);
});

@@ -3,12 +3,25 @@ import * as path from "path";

import * as actionsUtil from "./actions-util";
import type { PullRequestBranches } from "./actions-util";
-import { getGitHubVersion } from "./api-client";
+import { getApiClient, getGitHubVersion } from "./api-client";
import type { CodeQL } from "./codeql";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { getRepositoryNwoFromEnv } from "./repository";
import { GitHubVariant, satisfiesGHESVersion } from "./util";

/**
 * This interface is an abbreviated version of the file diff object returned by
 * the GitHub API.
 */
interface FileDiff {
  filename: string;
  changes: number;
  // A patch may be absent if the file is binary, if the file diff is too large,
  // or if the file is unchanged.
  patch?: string | undefined;
}

/**
 * Check if the action should perform diff-informed analysis.
 */
@@ -93,3 +106,174 @@ export function readDiffRangesJsonFile(
  );
  return JSON.parse(jsonContents) as DiffThunkRange[];
}

/**
 * Return the file line ranges that were added or modified in the pull request.
 *
 * @param branches The base and head branches of the pull request.
 * @param logger
 * @returns An array of tuples, where each tuple contains the absolute path of a
 * file, the start line and the end line (both 1-based and inclusive) of an
 * added or modified range in that file. Returns `undefined` if the action was
 * not triggered by a pull request or if there was an error.
 */
export async function getPullRequestEditedDiffRanges(
  branches: PullRequestBranches,
  logger: Logger,
): Promise<DiffThunkRange[] | undefined> {
  const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
  if (fileDiffs === undefined) {
    return undefined;
  }
  if (fileDiffs.length >= 300) {
    // The "compare two commits" API returns a maximum of 300 changed files. If
    // we see that many changed files, it is possible that there could be more,
    // with the rest being truncated. In this case, we should not attempt to
    // compute the diff ranges, as the result would be incomplete.
    logger.warning(
      `Cannot retrieve the full diff because there are too many ` +
        `(${fileDiffs.length}) changed files in the pull request.`,
    );
    return undefined;
  }
  const results: DiffThunkRange[] = [];
  for (const filediff of fileDiffs) {
    const diffRanges = getDiffRanges(filediff, logger);
    if (diffRanges === undefined) {
      return undefined;
    }
    results.push(...diffRanges);
  }
  return results;
}

async function getFileDiffsWithBasehead(
  branches: PullRequestBranches,
  logger: Logger,
): Promise<FileDiff[] | undefined> {
  // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
  // to GITHUB_REPOSITORY.
  const repositoryNwo = getRepositoryNwoFromEnv(
    "CODE_SCANNING_REPOSITORY",
    "GITHUB_REPOSITORY",
  );
  const basehead = `${branches.base}...${branches.head}`;
  try {
    const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
      {
        owner: repositoryNwo.owner,
        repo: repositoryNwo.repo,
        basehead,
        per_page: 1,
      },
    );
    logger.debug(
      `Response from compareCommitsWithBasehead(${basehead}):` +
        `\n${JSON.stringify(response, null, 2)}`,
    );
    return response.data.files;
  } catch (error: any) {
    if (error.status) {
      logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
      logger.debug(
        `Error running compareCommitsWithBasehead(${basehead}):` +
          `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
          `\nError Response: ${JSON.stringify(error.response, null, 2)}`,
      );
      return undefined;
    } else {
      throw error;
    }
  }
}

function getDiffRanges(
  fileDiff: FileDiff,
  logger: Logger,
): DiffThunkRange[] | undefined {
  // Diff-informed queries expect the file path to be absolute. CodeQL always
  // uses forward slashes as the path separator, so on Windows we need to
  // replace any backslashes with forward slashes.
  const filename = path
    .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
    .replaceAll(path.sep, "/");

  if (fileDiff.patch === undefined) {
    if (fileDiff.changes === 0) {
      // There are situations where a changed file legitimately has no diff.
      // For example, the file may be a binary file, or that the file may have
      // been renamed with no changes to its contents. In these cases, the
      // file would be reported as having 0 changes, and we can return an empty
      // array to indicate no diff range in this file.
      return [];
    }
    // If a file is reported to have nonzero changes but no patch, that may be
    // due to the file diff being too large. In this case, we should fall back
    // to a special diff range that covers the entire file.
    return [
      {
        path: filename,
        startLine: 0,
        endLine: 0,
      },
    ];
  }

  // The 1-based file line number of the current line
  let currentLine = 0;
  // The 1-based file line number that starts the current range of added lines
  let additionRangeStartLine: number | undefined = undefined;
  const diffRanges: DiffThunkRange[] = [];

  const diffLines = fileDiff.patch.split("\n");
  // Adding a fake context line at the end ensures that the following loop will
  // always terminate the last range of added lines.
  diffLines.push(" ");

  for (const diffLine of diffLines) {
    if (diffLine.startsWith("-")) {
      // Ignore deletions completely -- we do not even want to consider them when
      // calculating consecutive ranges of added lines.
      continue;
    }
    if (diffLine.startsWith("+")) {
      if (additionRangeStartLine === undefined) {
        additionRangeStartLine = currentLine;
      }
      currentLine++;
      continue;
    }
    if (additionRangeStartLine !== undefined) {
      // Any line that does not start with a "+" or "-" terminates the current
      // range of added lines.
      diffRanges.push({
        path: filename,
        startLine: additionRangeStartLine,
        endLine: currentLine - 1,
      });
      additionRangeStartLine = undefined;
    }
    if (diffLine.startsWith("@@ ")) {
      // A new hunk header line resets the current line number.
      const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
      if (match === null) {
        logger.warning(
          `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
        );
        return undefined;
      }
      currentLine = parseInt(match[1], 10);
      continue;
    }
    if (diffLine.startsWith(" ")) {
      // An unchanged context line advances the current line number.
      currentLine++;
      continue;
    }
  }
  return diffRanges;
}

export const exportedForTesting = {
  getDiffRanges,
};

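As an aside (not part of the diff): a standalone sketch of the line arithmetic that getDiffRanges performs, using the same hunk header regex, applied to the patch from the "single addition range" test case above.

// Standalone sketch (not the action's code) of the hunk header arithmetic.
const hunkHeader = "@@ -30,6 +50,8 @@";
const match = hunkHeader.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
if (match === null) {
  throw new Error(`Cannot parse diff hunk header: ${hunkHeader}`);
}
// match[1] is "50": the hunk starts at line 50 of the new file.
let currentLine = parseInt(match[1], 10);
// The three context lines " a", " b", " c" advance the counter to 53, so the
// two added lines "+1" and "+2" cover new-file lines 53 and 54.
currentLine += 3;
console.log({ startLine: currentLine, endLine: currentLine + 1 }); // { startLine: 53, endLine: 54 }
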
@@ -47,6 +47,9 @@ export enum EnvVar {
  /** Whether the CodeQL Action has already warned the user about low disk space. */
  HAS_WARNED_ABOUT_DISK_SPACE = "CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE",

  /** Whether the `setup-codeql` action has been run. */
  SETUP_CODEQL_ACTION_HAS_RUN = "CODEQL_ACTION_SETUP_CODEQL_HAS_RUN",

  /** Whether the init action has been run. */
  INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_HAS_RUN",

@@ -134,4 +137,10 @@ export enum EnvVar {
   * This setting is more specific than `CODEQL_ACTION_TEST_MODE`, which implies this option.
   */
  SKIP_SARIF_UPLOAD = "CODEQL_ACTION_SKIP_SARIF_UPLOAD",

  /**
   * Whether to skip workflow validation. Intended for internal use, where we know that
   * the workflow is valid and validation is not necessary.
   */
  SKIP_WORKFLOW_VALIDATION = "CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION",
}

Some files were not shown because too many files have changed in this diff.