mirror of https://github.com/github/codeql-action.git
synced 2025-12-22 07:10:23 +08:00

Compare commits: mbg/csharp...copilot/up (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | fa79376718 | |
| | 80f8fb8bc7 | |
| | 1373f1f5c9 | |

@@ -16,9 +16,9 @@ runs:
         shell: bash
 
     - name: Set up Python
-      uses: actions/setup-python@v6
+      uses: actions/setup-python@v5
       with:
-        python-version: '3.12'
+        python-version: 3.12
 
     - name: Install dependencies
       run: |

8  .github/pull_request_template.md  (vendored)
@@ -29,13 +29,14 @@ For internal use only. Please select the risk level of this change:
 
 #### How did/will you validate this change?
 
-<!-- Delete options that don't apply. -->
+<!-- Delete options that don't apply. Be explicit about test coverage. -->
 
 - **Test repository** - This change will be tested on a test repository before merging.
-- **Unit tests** - I am depending on unit test coverage (i.e. tests in `.test.ts` files).
+- **Unit tests** - I am depending on existing unit test coverage (i.e. tests in `.test.ts` files).
 - **End-to-end tests** - I am depending on PR checks (i.e. tests in `pr-checks`).
+- **New / updated tests** - I have added or updated tests (summarize below).
 - **Other** - Please provide details.
-- **None** - I am not validating these changes.
+- **None** - I am not validating these changes (provide justification below).
 
 #### If something goes wrong after this change is released, what are the mitigation and rollback strategies?
 
@@ -57,5 +58,6 @@ For internal use only. Please select the risk level of this change:
 ### Merge / deployment checklist
 
 - Confirm this change is backwards compatible with existing workflows.
+- Confirm that tests have been added/updated or are not needed.
 - Consider adding a [changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) entry for this change.
 - Confirm the [readme](https://github.com/github/codeql-action/blob/main/README.md) and docs have been updated if necessary.

55  .github/sizeup.yml  (vendored)
@@ -1,55 +0,0 @@
-labeling:
-  applyCategoryLabels: true
-  categoryLabelPrefix: "size/"
-
-commenting:
-  addCommentWhenScoreThresholdHasBeenExceeded: false
-
-sizeup:
-  categories:
-    - name: extra small
-      lte: 25
-      label:
-        name: XS
-        description: Should be very easy to review
-        color: 3cbf00
-    - name: small
-      lte: 100
-      label:
-        name: S
-        description: Should be easy to review
-        color: 5d9801
-    - name: medium
-      lte: 250
-      label:
-        name: M
-        description: Should be of average difficulty to review
-        color: 7f7203
-    - name: large
-      lte: 500
-      label:
-        name: L
-        description: May be hard to review
-        color: a14c05
-    - name: extra large
-      lte: 1000
-      label:
-        name: XL
-        description: May be very hard to review
-        color: c32607
-    - name: extra extra large
-      label:
-        name: XXL
-        description: May be extremely hard to review
-        color: e50009
-  ignoredFilePatterns:
-    - ".github/workflows/__*"
-    - "lib/**/*"
-    - "package-lock.json"
-  testFilePatterns:
-    - "**/*.test.ts"
-  scoring:
-    # This formula and the aliases below it are written in prefix notation.
-    # For an explanation of how this works, please see:
-    # https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
-    formula: "- - + additions deletions comments whitespace"
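
For reference, the `scoring.formula` in the removed sizeup configuration above is written in prefix notation, as its comment notes: each operator is followed by its two operands, so `- - + additions deletions comments whitespace` evaluates as `((additions + deletions) - comments) - whitespace`. The following sketch is illustrative only and is not part of the repository; the variable values are invented for the example.

```python
# Illustrative only: a tiny evaluator for sizeup-style prefix-notation formulas.
import operator

OPS = {"+": operator.add, "-": operator.sub, "*": operator.mul, "/": operator.truediv}

def evaluate_prefix(tokens, variables):
    """Evaluate a prefix-notation expression: an operator applies to the next two sub-expressions."""
    token = tokens.pop(0)
    if token in OPS:
        left = evaluate_prefix(tokens, variables)
        right = evaluate_prefix(tokens, variables)
        return OPS[token](left, right)
    return variables[token]

formula = "- - + additions deletions comments whitespace"
# Example values (made up): 120 added lines, 30 deleted, 10 comment lines, 5 whitespace-only lines.
score = evaluate_prefix(formula.split(), {"additions": 120, "deletions": 30, "comments": 10, "whitespace": 5})
print(score)  # ((120 + 30) - 10) - 5 = 135
```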

15  .github/workflows/__analyze-ref-input.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
      - uses: ./../action/init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
|||||||
5
.github/workflows/__bundle-from-toolcache.yml
generated
vendored
5
.github/workflows/__bundle-from-toolcache.yml
generated
vendored
@@ -67,9 +67,10 @@ jobs:
|
|||||||
if (allCodeqlVersions.length === 0) {
|
if (allCodeqlVersions.length === 0) {
|
||||||
throw new Error(`CodeQL could not be found in the toolcache`);
|
throw new Error(`CodeQL could not be found in the toolcache`);
|
||||||
}
|
}
|
||||||
- id: setup-codeql
|
- id: init
|
||||||
uses: ./../action/setup-codeql
|
uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
|
languages: javascript
|
||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- name: Check CodeQL is installed within the toolcache
|
- name: Check CodeQL is installed within the toolcache
|
||||||
uses: actions/github-script@v8
|
uses: actions/github-script@v8
|
||||||
|
|||||||

2  .github/workflows/__bundle-zstd.yml  (generated, vendored)
@@ -79,7 +79,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ matrix.os }}-zstd-bundle.sarif
           path: ${{ runner.temp }}/results/javascript.sarif

2  .github/workflows/__config-export.yml  (generated, vendored)
@@ -67,7 +67,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

2  .github/workflows/__config-input.yml  (generated, vendored)
@@ -49,7 +49,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm

2  .github/workflows/__diagnostics-export.yml  (generated, vendored)
@@ -78,7 +78,7 @@ jobs:
           output: ${{ runner.temp }}/results
           upload-database: false
       - name: Upload SARIF
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

2  .github/workflows/__export-file-baseline-information.yml  (generated, vendored)
@@ -85,7 +85,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

2  .github/workflows/__job-run-uuid-sarif.yml  (generated, vendored)
@@ -64,7 +64,7 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
       - name: Upload SARIF
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
           path: ${{ runner.temp }}/results/javascript.sarif

15  .github/workflows/__local-bundle.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Fetch latest CodeQL bundle
         run: |
           wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst

15  .github/workflows/__multi-language-autodetect.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -114,11 +104,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Use Xcode 16
         if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
         run: sudo xcode-select -s "/Applications/Xcode_16.app"

17  .github/workflows/__packaging-codescanning-config-inputs-js.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -73,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm
@@ -91,11 +81,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging3.yml

2  .github/workflows/__packaging-config-inputs-js.yml  (generated, vendored)
@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm

2  .github/workflows/__packaging-config-js.yml  (generated, vendored)
@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm

2  .github/workflows/__packaging-inputs-js.yml  (generated, vendored)
@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm

13  .github/workflows/__quality-queries.yml  (generated, vendored)
@@ -80,10 +80,9 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
           upload-database: false
-          post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -91,20 +90,12 @@ jobs:
           retention-days: 7
       - name: Upload quality SARIF
         if: contains(matrix.analysis-kinds, 'code-quality')
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: |
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
-      - name: Upload post-processed SARIF
-        uses: actions/upload-artifact@v5
-        with:
-          name: |
-            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-          path: ${{ runner.temp }}/post-processed
-          retention-days: 7
-          if-no-files-found: error
       - name: Check quality query does not appear in security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/github-script@v8

15  .github/workflows/__remote-config.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -82,11 +72,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

2  .github/workflows/__rubocop-multi-language.yml  (generated, vendored)
@@ -56,7 +56,7 @@ jobs:
           use-all-platform-bundle: 'false'
           setup-kotlin: 'true'
       - name: Set up Ruby
-        uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
+        uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
         with:
           ruby-version: 2.6
       - name: Install Code Scanning integration

15  .github/workflows/__unset-environment.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -82,11 +72,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         id: init
         with:

15  .github/workflows/__upload-ref-sha-input.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

15  .github/workflows/__upload-sarif.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -87,11 +77,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}

15  .github/workflows/__with-checkout-path.yml  (generated, vendored)
@@ -27,11 +27,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
         description: The version of Go to install
         required: false
         default: '>=1.21.0'
-      python-version:
-        type: string
-        description: The version of Python to install
-        required: false
-        default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Delete original checkout
         run: |
           # delete the original checkout so we don't accidentally use it.

@@ -15,7 +15,7 @@ defaults:
 
 jobs:
   check-expected-release-files:
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
 
     permissions:
       contents: read

2  .github/workflows/codeql.yml  (vendored)
@@ -81,7 +81,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        os: [ubuntu-22.04,ubuntu-24.04,windows-2022,windows-2025,macos-14,macos-15]
+        os: [ubuntu-22.04,ubuntu-24.04,windows-2022,windows-2025,macos-13,macos-14,macos-15]
         tools: ${{ fromJson(needs.check-codeql-versions.outputs.versions) }}
     runs-on: ${{ matrix.os }}
 

@@ -56,7 +56,7 @@ jobs:
         uses: actions/checkout@v5
 
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: 'npm'

@@ -79,7 +79,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all artifacts
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v5
       - name: Check expected artifacts exist
         run: |
           LANGUAGES="cpp csharp go java javascript python"

2  .github/workflows/debug-artifacts-safe.yml  (vendored)
@@ -73,7 +73,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all artifacts
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v5
       - name: Check expected artifacts exist
         run: |
           VERSIONS="stable-v2.20.3 default linked nightly-latest"

26  .github/workflows/label-pr-size.yml  (vendored)
@@ -1,26 +0,0 @@
-name: Label PR with size
-
-on:
-  pull_request:
-    types:
-      - opened
-      - synchronize
-      - reopened
-      - edited
-      - ready_for_review
-
-permissions:
-  contents: read
-  pull-requests: write
-
-jobs:
-  sizeup:
-    name: Label PR with size
-    runs-on: ubuntu-slim
-
-    steps:
-      - name: Run sizeup
-        uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
-        with:
-          token: "${{ secrets.GITHUB_TOKEN }}"
-          configuration-file-path: ".github/sizeup.yml"

7  .github/workflows/post-release-mergeback.yml  (vendored)
@@ -24,7 +24,7 @@ defaults:
 
 jobs:
   merge-back:
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     environment: Automation
     if: github.repository == 'github/codeql-action'
     env:
@@ -47,10 +47,7 @@ jobs:
       - uses: actions/checkout@v5
         with:
           fetch-depth: 0 # ensure we have all tags and can push commits
-      - uses: actions/setup-node@v6
-      - uses: actions/setup-python@v6
-        with:
-          python-version: '3.12'
+      - uses: actions/setup-node@v5
 
       - name: Update git config
         run: |

2  .github/workflows/pr-checks.yml  (vendored)
@@ -35,7 +35,7 @@ jobs:
       - uses: actions/checkout@v5
 
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: ${{ matrix.node-version }}
           cache: 'npm'

2  .github/workflows/prepare-release.yml  (vendored)
@@ -29,7 +29,7 @@ defaults:
 jobs:
   prepare:
     name: "Prepare release"
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     if: github.repository == 'github/codeql-action'
 
     permissions:

28  .github/workflows/publish-immutable-action.yml  (vendored)
@@ -1,10 +1,8 @@
 name: 'Publish Immutable Action Version'
 
 on:
-  push:
-    tags:
-      # Match version tags, but not the major version tags.
-      - 'v[0-9]+.**'
+  release:
+    types: [published]
 
 defaults:
   run:
@@ -12,16 +10,30 @@ defaults:
 
 jobs:
   publish:
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     permissions:
       contents: read
       id-token: write
       packages: write
 
     steps:
-      - name: Checkout repository
+      - name: Check release name
+        id: check
+        env:
+          RELEASE_NAME: ${{ github.event.release.name }}
+        run: |
+          echo "Release name: ${{ github.event.release.name }}"
+          if [[ $RELEASE_NAME == v* ]]; then
+            echo "This is a CodeQL Action release. Create an Immutable Action"
+            echo "is-action-release=true" >> $GITHUB_OUTPUT
+          else
+            echo "This is a CodeQL Bundle release. Do not create an Immutable Action"
+            echo "is-action-release=false" >> $GITHUB_OUTPUT
+          fi
+      - name: Checking out
+        if: steps.check.outputs.is-action-release == 'true'
         uses: actions/checkout@v5
-      - name: Publish immutable release
+      - name: Publish
+        if: steps.check.outputs.is-action-release == 'true'
         id: publish
         uses: actions/publish-immutable-action@v0.0.4

2  .github/workflows/query-filters.yml  (vendored)
@@ -32,7 +32,7 @@ jobs:
         uses: actions/checkout@v5
 
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: npm

18  .github/workflows/script/bundle_changelog.py  (vendored)
@@ -1,18 +0,0 @@
-import os
-import re
-
-# Get the PR number from the PR URL.
-pr_number = os.environ['PR_URL'].split('/')[-1]
-changelog_note = f"- Update default CodeQL bundle version to {os.environ['CLI_VERSION']}. [#{pr_number}]({os.environ['PR_URL']})"
-
-# If the "[UNRELEASED]" section starts with "no user facing changes", remove that line.
-with open('CHANGELOG.md', 'r') as f:
-    changelog = f.read()
-
-changelog = changelog.replace('## [UNRELEASED]\n\nNo user facing changes.', '## [UNRELEASED]\n')
-
-# Add the changelog note to the bottom of the "[UNRELEASED]" section.
-changelog = re.sub(r'\n## (\d+\.\d+\.\d+)', f'{changelog_note}\n\n## \\1', changelog, count=1)
-
-with open('CHANGELOG.md', 'w') as f:
-    f.write(changelog)

@@ -29,7 +29,7 @@ fi
 echo "Getting checks for $GITHUB_SHA"
 
 # Ignore any checks with "https://", CodeQL, LGTM, Update, and ESLint checks.
-CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . == "Upload results" | not)] | unique | sort')"
+CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
 
 echo "$CHECKS" | jq
 

31  .github/workflows/update-bundle.yml  (vendored)
@@ -20,7 +20,7 @@ defaults:
 jobs:
   update-bundle:
     if: github.event.release.prerelease && startsWith(github.event.release.tag_name, 'codeql-bundle-')
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     permissions:
       contents: write # needed to push commits
       pull-requests: write # needed to create pull requests
@@ -40,13 +40,8 @@ jobs:
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git config --global user.name "github-actions[bot]"
 
-      - name: Set up Python
-        uses: actions/setup-python@v6
-        with:
-          python-version: '3.12'
-
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: 'npm'
@@ -83,8 +78,28 @@ jobs:
           echo "PR_URL=$pr_url" | tee -a "$GITHUB_ENV"
 
       - name: Create changelog note
+        shell: python
         run: |
-          python .github/workflows/script/bundle_changelog.py
+          import os
+          import re
+
+          # Get the PR number from the PR URL.
+          pr_number = os.environ['PR_URL'].split('/')[-1]
+          changelog_note = f"- Update default CodeQL bundle version to {os.environ['CLI_VERSION']}. [#{pr_number}]({os.environ['PR_URL']})"
+
+          # If the "[UNRELEASED]" section starts with "no user facing changes", remove that line.
+          # Use perl to avoid having to escape the newline character.
+
+          with open('CHANGELOG.md', 'r') as f:
+              changelog = f.read()
+
+          changelog = changelog.replace('## [UNRELEASED]\n\nNo user facing changes.', '## [UNRELEASED]\n')
+
+          # Add the changelog note to the bottom of the "[UNRELEASED]" section.
+          changelog = re.sub(r'\n## (\d+\.\d+\.\d+)', f'{changelog_note}\n\n## \\1', changelog, count=1)
+
+          with open('CHANGELOG.md', 'w') as f:
+              f.write(changelog)
 
       - name: Push changelog note
         run: |

4  .github/workflows/update-release-branch.yml  (vendored)
@@ -26,7 +26,7 @@ jobs:
 
   update:
     timeout-minutes: 45
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch'
     needs: [prepare]
     env:
@@ -77,7 +77,7 @@ jobs:
 
   backport:
     timeout-minutes: 45
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     environment: Automation
     needs: [prepare]
     if: ${{ (github.event_name == 'push') && needs.prepare.outputs.backport_target_branches != '[]' }}

@@ -4,18 +4,12 @@ on:
   schedule:
     - cron: "0 0 * * *"
   workflow_dispatch:
-  pull_request:
-    branches:
-      - main
-    paths:
-      - .github/workflows/update-supported-enterprise-server-versions.yml
-      - .github/workflows/update-supported-enterprise-server-versions/update.py
 
 jobs:
   update-supported-enterprise-server-versions:
     name: Update Supported Enterprise Server Versions
     timeout-minutes: 45
-    runs-on: ubuntu-slim
+    runs-on: ubuntu-latest
     if: github.repository == 'github/codeql-action'
     permissions:
       contents: write # needed to push commits
@@ -34,7 +28,6 @@ jobs:
           repository: github/enterprise-releases
           token: ${{ secrets.ENTERPRISE_RELEASE_TOKEN }}
           path: ${{ github.workspace }}/enterprise-releases/
-          sparse-checkout: releases.json
       - name: Update Supported Enterprise Server Versions
         run: |
           cd ./.github/workflows/update-supported-enterprise-server-versions/
@@ -42,7 +35,6 @@ jobs:
           pipenv install
           pipenv run ./update.py
           rm --recursive "$ENTERPRISE_RELEASES_PATH"
-          npm ci
           npm run build
         env:
           ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
@@ -52,33 +44,25 @@ jobs:
           git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
           git config --global user.name "github-actions[bot]"
 
-      - name: Commit changes
-        id: prepare-commit
+      - name: Commit changes and open PR
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           if [[ -z $(git status --porcelain) ]]; then
             echo "No changes to commit"
-            echo "committed=false" >> $GITHUB_OUTPUT
           else
             git checkout -b update-supported-enterprise-server-versions
             git add .
             git commit --message "Update supported GitHub Enterprise Server versions"
+            git push origin update-supported-enterprise-server-versions
 
-            echo "committed=true" >> $GITHUB_OUTPUT
+            body="This PR updates the list of supported GitHub Enterprise Server versions, either because a new "
+            body+="version is about to be feature frozen, or because an old release has been deprecated."
+            body+=$'\n\n'
+            body+="If an old release has been deprecated, please follow the instructions in CONTRIBUTING.md to "
+            body+="deprecate the corresponding version of CodeQL."
+
+            gh pr create --draft \
+              --title "Update supported GitHub Enterprise Server versions" \
+              --body "$body"
           fi
 
-      - name: Open PR
-        if: github.event_name != 'pull_request' && steps.prepare-commit.outputs.committed == 'true'
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          git push origin update-supported-enterprise-server-versions
-
-          body="This PR updates the list of supported GitHub Enterprise Server versions, either because a new "
-          body+="version is about to be feature frozen, or because an old release has been deprecated."
-          body+=$'\n\n'
-          body+="If an old release has been deprecated, please follow the instructions in CONTRIBUTING.md to "
-          body+="deprecate the corresponding version of CodeQL."
-
-          gh pr create --draft \
-            --title "Update supported GitHub Enterprise Server versions" \
-            --body "$body"

18  CHANGELOG.md
@@ -4,26 +4,8 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
 
 ## [UNRELEASED]
 
-- CodeQL Action v3 will be deprecated in December 2026. The Action now logs a warning for customers who are running v3 but could be running v4. For more information, see [Upcoming deprecation of CodeQL Action v3](https://github.blog/changelog/2025-10-28-upcoming-deprecation-of-codeql-action-v3/).
-
-## 4.31.2 - 30 Oct 2025
-
 No user facing changes.
 
-## 4.31.1 - 30 Oct 2025
-
-- The `add-snippets` input has been removed from the `analyze` action. This input has been deprecated since CodeQL Action 3.26.4 in August 2024 when this removal was announced.
-
-## 4.31.0 - 24 Oct 2025
-
-- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
-- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
-
-## 4.30.9 - 17 Oct 2025
-
-- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
-- Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)
-
 ## 4.30.8 - 10 Oct 2025
 
 No user facing changes.

@@ -34,7 +34,6 @@ Actions with special purposes and unlikely to be used directly:
 - `autobuild`: Attempts to automatically build the code. Only used for analyzing languages that require a build. Use the `build-mode: autobuild` input in the `init` action instead. For information about input parameters, see the [autobuild action definition](https://github.com/github/codeql-action/blob/main/autobuild/action.yml).
 - `resolve-environment`: [Experimental] Attempts to infer a build environment suitable for automatic builds. For information about input parameters, see the [resolve-environment action definition](https://github.com/github/codeql-action/blob/main/resolve-environment/action.yml).
 - `start-proxy`: [Experimental] Start the HTTP proxy server. Internal use only and will change without notice. For information about input parameters, see the [start-proxy action definition](https://github.com/github/codeql-action/blob/main/start-proxy/action.yml).
-- `setup-codeql`: [Experimental] Similar to `init`, except it only installs the CodeQL CLI and does not initialize a database.
 
 ### Workflow Permissions
 

@@ -6,7 +6,7 @@ inputs:
     description: The name of the check run to add text to.
     required: false
   output:
-    description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
+    description: The path of the directory in which to save the SARIF results
     required: false
     default: "../results"
   upload:
@@ -32,10 +32,14 @@ inputs:
      and 13GB for macOS).
    required: false
  add-snippets:
-    description: Does not have any effect.
+    description: Specify whether or not to add code snippets to the output sarif file.
    required: false
+    default: "false"
    deprecationMessage: >-
-      The input "add-snippets" has been removed and no longer has any effect.
+      The input "add-snippets" is deprecated and will be removed on the first release in August 2025.
+      When this input is set to true it is expected to add code snippets with an alert to the SARIF file.
+      However, since Code Scanning ignores code snippets provided as part of a SARIF file this is currently
+      a no operation. No alternative is available.
  skip-queries:
    description: If this option is set, the CodeQL database will be built but no queries will be run on it. Thus, no results will be produced.
    required: false
@@ -66,12 +70,6 @@ inputs:
    description: Whether to upload the resulting CodeQL database
    required: false
    default: "true"
-  post-processed-sarif-path:
-    description: >-
-      Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
-      on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
-      argument for this input, they are written to the specified directory.
-    required: false
  wait-for-processing:
    description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
    required: true

@@ -12,7 +12,6 @@ import filenames from "eslint-plugin-filenames";
 import github from "eslint-plugin-github";
 import _import from "eslint-plugin-import";
 import noAsyncForeach from "eslint-plugin-no-async-foreach";
-import jsdoc from "eslint-plugin-jsdoc";
 import globals from "globals";
 
 const __filename = fileURLToPath(import.meta.url);
@@ -53,7 +52,6 @@ export default [
       github: fixupPluginRules(github),
       import: fixupPluginRules(_import),
       "no-async-foreach": noAsyncForeach,
-      "jsdoc": jsdoc,
     },
 
     languageOptions: {
@@ -133,18 +131,7 @@ export default [
      "no-sequences": "error",
      "no-shadow": "off",
      "@typescript-eslint/no-shadow": "error",
-      "@typescript-eslint/prefer-optional-chain": "error",
      "one-var": ["error", "never"],
-
-      // Check param names to ensure that we don't have outdated JSDocs.
-      "jsdoc/check-param-names": [
-        "error",
-        {
-          // We don't currently require full JSDoc coverage, so this rule
-          // should not error on missing @param annotations.
-          disableMissingParamChecks: true,
-        }
-      ],
    },
  },
  {
15075 lib/analyze-action-post.js (generated): file diff suppressed because one or more lines are too long.
16689 lib/analyze-action.js (generated): file diff suppressed because it is too large.
7102 lib/autobuild-action.js (generated): file diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 {
-"bundleVersion": "codeql-bundle-v2.23.3",
-"cliVersion": "2.23.3",
-"priorBundleVersion": "codeql-bundle-v2.23.2",
-"priorCliVersion": "2.23.2"
+"bundleVersion": "codeql-bundle-v2.23.2",
+"cliVersion": "2.23.2",
+"priorBundleVersion": "codeql-bundle-v2.23.1",
+"priorCliVersion": "2.23.1"
 }
25397 lib/init-action-post.js (generated): file diff suppressed because one or more lines are too long.
15298 lib/init-action.js (generated): file diff suppressed because it is too large.
7099 lib/resolve-environment-action.js (generated): file diff suppressed because it is too large.
86442 lib/setup-codeql-action.js (generated): file diff suppressed because one or more lines are too long.
15008 lib/start-proxy-action-post.js (generated): file diff suppressed because one or more lines are too long.
6810 lib/start-proxy-action.js (generated): file diff suppressed because it is too large.
13043 lib/upload-lib.js (generated): file diff suppressed because it is too large.
15038 lib/upload-sarif-action-post.js (generated): file diff suppressed because one or more lines are too long.
13350 lib/upload-sarif-action.js (generated): file diff suppressed because it is too large.
1875 package-lock.json (generated): file diff suppressed because it is too large.
29 package.json
@@ -1,6 +1,6 @@
 {
 "name": "codeql",
-"version": "4.31.3",
+"version": "4.30.9",
 "private": true,
 "description": "CodeQL action",
 "scripts": {
@@ -24,20 +24,23 @@
 },
 "license": "MIT",
 "dependencies": {
-"@actions/artifact": "^4.0.0",
+"@actions/artifact": "^2.3.1",
 "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
 "@actions/cache": "^4.1.0",
 "@actions/core": "^1.11.1",
 "@actions/exec": "^1.1.1",
 "@actions/github": "^6.0.0",
 "@actions/glob": "^0.5.0",
-"@actions/http-client": "^3.0.0",
+"@actions/http-client": "^2.2.3",
-"@actions/io": "^2.0.0",
+"@actions/io": "^1.1.3",
 "@actions/tool-cache": "^2.0.2",
 "@octokit/plugin-retry": "^6.0.0",
-"@octokit/request-error": "^7.0.2",
+"@octokit/request-error": "^7.0.1",
 "@schemastore/package": "0.0.10",
 "archiver": "^7.0.1",
+"check-disk-space": "^3.4.0",
+"console-log-level": "^1.4.1",
+"del": "^8.0.0",
 "fast-deep-equal": "^3.1.3",
 "follow-redirects": "^1.15.11",
 "get-folder-size": "^5.0.0",
@@ -45,34 +48,34 @@
 "jsonschema": "1.4.1",
 "long": "^5.3.2",
 "node-forge": "^1.3.1",
-"octokit": "^5.0.5",
+"octokit": "^5.0.3",
 "semver": "^7.7.3",
 "uuid": "^13.0.0"
 },
 "devDependencies": {
 "@ava/typescript": "6.0.0",
-"@eslint/compat": "^1.4.1",
+"@eslint/compat": "^1.4.0",
 "@eslint/eslintrc": "^3.3.1",
-"@eslint/js": "^9.39.1",
+"@eslint/js": "^9.37.0",
 "@microsoft/eslint-formatter-sarif": "^3.1.0",
-"@octokit/types": "^16.0.0",
+"@octokit/types": "^15.0.0",
-"@types/archiver": "^7.0.0",
+"@types/archiver": "^6.0.3",
+"@types/console-log-level": "^1.4.5",
 "@types/follow-redirects": "^1.14.4",
 "@types/js-yaml": "^4.0.9",
 "@types/node": "20.19.9",
 "@types/node-forge": "^1.3.14",
 "@types/semver": "^7.7.1",
 "@types/sinon": "^17.0.4",
-"@typescript-eslint/eslint-plugin": "^8.46.4",
+"@typescript-eslint/eslint-plugin": "^8.46.0",
 "@typescript-eslint/parser": "^8.41.0",
 "ava": "^6.4.1",
-"esbuild": "^0.27.0",
+"esbuild": "^0.25.10",
 "eslint": "^8.57.1",
 "eslint-import-resolver-typescript": "^3.8.7",
 "eslint-plugin-filenames": "^1.3.2",
 "eslint-plugin-github": "^5.1.8",
 "eslint-plugin-import": "2.29.1",
-"eslint-plugin-jsdoc": "^61.1.12",
 "eslint-plugin-no-async-foreach": "^0.1.1",
 "glob": "^11.0.3",
 "nock": "^14.0.10",
@@ -2,7 +2,6 @@ name: "Analyze: 'ref' and 'sha' from inputs"
 description: "Checks that specifying 'ref' and 'sha' as inputs works"
 versions: ["default"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -15,9 +15,10 @@ steps:
 if (allCodeqlVersions.length === 0) {
 throw new Error(`CodeQL could not be found in the toolcache`);
 }
-- id: setup-codeql
-uses: ./../action/setup-codeql
+- id: init
+uses: ./../action/init
 with:
+languages: javascript
 tools: ${{ steps.prepare-test.outputs.tools-url }}
 - name: Check CodeQL is installed within the toolcache
 uses: actions/github-script@v8
@@ -27,7 +27,7 @@ steps:
 output: ${{ runner.temp }}/results
 upload-database: false
 - name: Upload SARIF
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: ${{ matrix.os }}-zstd-bundle.sarif
 path: ${{ runner.temp }}/results/javascript.sarif
@@ -12,7 +12,7 @@ steps:
 output: "${{ runner.temp }}/results"
 upload-database: false
 - name: Upload SARIF
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
 path: "${{ runner.temp }}/results/javascript.sarif"
@@ -25,7 +25,7 @@ steps:
 output: "${{ runner.temp }}/results"
 upload-database: false
 - name: Upload SARIF
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
 path: "${{ runner.temp }}/results/javascript.sarif"
@@ -17,7 +17,7 @@ steps:
 with:
 output: "${{ runner.temp }}/results"
 - name: Upload SARIF
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
 path: "${{ runner.temp }}/results/javascript.sarif"
@@ -11,7 +11,7 @@ steps:
 with:
 output: "${{ runner.temp }}/results"
 - name: Upload SARIF
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
 path: "${{ runner.temp }}/results/javascript.sarif"
@@ -2,7 +2,6 @@ name: "Local CodeQL bundle"
 description: "Tests using a CodeQL bundle from a local file rather than a URL"
 versions: ["linked"]
 installGo: true
-installPython: true
 steps:
 - name: Fetch latest CodeQL bundle
 run: |
@@ -4,7 +4,6 @@ operatingSystems: ["macos", "ubuntu"]
 env:
 CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
 installGo: true
-installPython: true
 steps:
 - name: Use Xcode 16
 if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
@@ -3,7 +3,6 @@ description: "Checks that specifying packages using a combination of a config fi
 versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs
 installGo: true
 installNode: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -36,10 +36,9 @@ steps:
 with:
 output: "${{ runner.temp }}/results"
 upload-database: false
-post-processed-sarif-path: "${{ runner.temp }}/post-processed"
 - name: Upload security SARIF
 if: contains(matrix.analysis-kinds, 'code-scanning')
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: |
 quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
@@ -47,20 +46,12 @@ steps:
 retention-days: 7
 - name: Upload quality SARIF
 if: contains(matrix.analysis-kinds, 'code-quality')
-uses: actions/upload-artifact@v5
+uses: actions/upload-artifact@v4
 with:
 name: |
 quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
 path: "${{ runner.temp }}/results/javascript.quality.sarif"
 retention-days: 7
-- name: Upload post-processed SARIF
-uses: actions/upload-artifact@v5
-with:
-name: |
-post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-path: "${{ runner.temp }}/post-processed"
-retention-days: 7
-if-no-files-found: error
 - name: Check quality query does not appear in security SARIF
 if: contains(matrix.analysis-kinds, 'code-scanning')
 uses: actions/github-script@v8
@@ -6,7 +6,6 @@ versions:
 - linked
 - nightly-latest
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
 versions: ["default"]
 steps:
 - name: Set up Ruby
-uses: ruby/setup-ruby@d5126b9b3579e429dd52e51e68624dda2e05be25 # v1.267.0
+uses: ruby/setup-ruby@ab177d40ee5483edb974554986f56b33477e21d0 # v1.265.0
 with:
 ruby-version: 2.6
 - name: Install Code Scanning integration
@@ -6,7 +6,6 @@ versions:
 - linked
 - nightly-latest
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 id: init
@@ -2,7 +2,6 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs"
 description: "Checks that specifying 'ref' and 'sha' as inputs works"
 versions: ["default"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -3,7 +3,6 @@ description: "Checks that uploading SARIFs to the code quality endpoint works"
 versions: ["default"]
 analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -2,7 +2,6 @@ name: "Use a custom `checkout_path`"
 description: "Checks that a custom `checkout_path` will find the proper commit_oid"
 versions: ["linked"]
 installGo: true
-installPython: true
 steps:
 # This ensures we don't accidentally use the original checkout for any part of the test.
 - name: Delete original checkout
@@ -117,7 +117,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
 steps.extend([
 {
 'name': 'Install Node.js',
-'uses': 'actions/setup-node@v6',
+'uses': 'actions/setup-node@v5',
 'with': {
 'node-version': '20.x',
 'cache': 'npm',
@@ -184,26 +184,6 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
 }
 })

-installPython = is_truthy(checkSpecification.get('installPython', ''))
-
-if installPython:
-basePythonVersionExpr = '3.13'
-workflowInputs['python-version'] = {
-'type': 'string',
-'description': 'The version of Python to install',
-'required': False,
-'default': basePythonVersionExpr,
-}
-
-steps.append({
-'name': 'Install Python',
-'if': 'matrix.version != \'nightly-latest\'',
-'uses': 'actions/setup-python@v6',
-'with': {
-'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
-}
-})
-
 # If container initialisation steps are present in the check specification,
 # make sure to execute them first.
 if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
@@ -9,15 +9,9 @@ if [ "$GITHUB_ACTIONS" = "true" ]; then
 fi

 # Check if npm install is likely needed before proceeding
-if [ ! -d node_modules ]; then
-echo "Running 'npm install' because 'node_modules' directory is missing."
-npm install
-elif [ package.json -nt package-lock.json ]; then
-echo "Running 'npm install' because 'package-lock.json' appears to be outdated."
-npm install
-elif [ package-lock.json -nt node_modules/.package-lock.json ]; then
-echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated."
+if [ ! -d node_modules ] || [ package-lock.json -nt node_modules/.package-lock.json ]; then
+echo "Running 'npm install' because 'node_modules/.package-lock.json' appears to be outdated..."
 npm install
 else
-echo "Skipping 'npm install' because everything appears to be up-to-date."
+echo "Skipping 'npm install' because 'node_modules/.package-lock.json' appears to be up-to-date."
 fi
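The script above decides whether to run `npm install` with the shell's `-nt` (newer-than) file test. A rough TypeScript equivalent of the same freshness check, shown only for reference and assuming Node's built-in `fs` module; it is not part of the repository:

import * as fs from "fs";

// Returns the mtime of a file, or undefined if it does not exist.
function mtime(path: string): number | undefined {
  try {
    return fs.statSync(path).mtimeMs;
  } catch {
    return undefined;
  }
}

// Mirrors `[ a -nt b ]`: true if `a` exists and is newer than `b`, or `b` is missing.
function newerThan(a: string, b: string): boolean {
  const ta = mtime(a);
  const tb = mtime(b);
  return ta !== undefined && (tb === undefined || ta > tb);
}

const needsInstall =
  !fs.existsSync("node_modules") ||
  newerThan("package-lock.json", "node_modules/.package-lock.json");
console.log(needsInstall ? "npm install needed" : "up-to-date");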
@@ -1,39 +0,0 @@
-name: 'CodeQL: Setup'
-description: 'Installs the CodeQL CLI'
-author: 'GitHub'
-inputs:
-tools:
-description: >-
-By default, the Action will use the recommended version of the CodeQL
-Bundle to analyze your project. You can override this choice using this
-input. One of:
-
-- A local path to a CodeQL Bundle tarball, or
-- The URL of a CodeQL Bundle tarball GitHub release asset, or
-- A special value `linked` which uses the version of the CodeQL tools
-that the Action has been bundled with.
-- A special value `nightly` which uses the latest nightly version of the
-CodeQL tools. Note that this is unstable and not recommended for
-production use.
-
-If not specified, the Action will check in several places until it finds
-the CodeQL tools.
-required: false
-token:
-description: GitHub token to use for authenticating with this instance of GitHub.
-default: ${{ github.token }}
-required: false
-matrix:
-default: ${{ toJson(matrix) }}
-required: false
-external-repository-token:
-description: A token for fetching additional files from private repositories in the same GitHub instance that is running this action.
-required: false
-outputs:
-codeql-path:
-description: The path of the CodeQL binary that was installed.
-codeql-version:
-description: The version of the CodeQL binary that was installed.
-runs:
-using: node24
-main: '../lib/setup-codeql-action.js'
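The removed action metadata above documents the accepted forms of the `tools` input (local path, release-asset URL, or the special values `linked` and `nightly`). A hedged sketch of how such an input could be classified before any download happens; this is illustrative only and not the action's actual resolution logic:

type ToolsSource =
  | { kind: "linked" }
  | { kind: "nightly" }
  | { kind: "url"; url: string }
  | { kind: "local"; path: string };

// Classify the `tools` input into one of the documented forms. The special
// values and the URL/path distinction follow the description above; the real
// resolution in the action is more involved.
function classifyToolsInput(tools: string | undefined): ToolsSource | undefined {
  if (tools === undefined || tools === "") {
    return undefined; // fall back to the recommended bundle version
  }
  if (tools === "linked") {
    return { kind: "linked" };
  }
  if (tools === "nightly") {
    return { kind: "nightly" };
  }
  if (tools.startsWith("http://") || tools.startsWith("https://")) {
    return { kind: "url", url: tools };
  }
  return { kind: "local", path: tools };
}

console.log(classifyToolsInput("linked"));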
@@ -1,19 +1,12 @@
 import test from "ava";
-import * as sinon from "sinon";
-
-import * as actionsUtil from "./actions-util";
 import {
 AnalysisKind,
-getAnalysisKinds,
 parseAnalysisKinds,
 supportedAnalysisKinds,
 } from "./analyses";
-import { getRunnerLogger } from "./logging";
-import { setupTests } from "./testing-utils";
 import { ConfigurationError } from "./util";

-setupTests(test);
-
 test("All known analysis kinds can be parsed successfully", async (t) => {
 for (const analysisKind of supportedAnalysisKinds) {
 t.deepEqual(await parseAnalysisKinds(analysisKind), [analysisKind]);
@@ -41,29 +34,3 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
 instanceOf: ConfigurationError,
 });
 });
-
-test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub
-.withArgs("analysis-kinds")
-.returns("code-scanning,code-quality");
-const result = await getAnalysisKinds(getRunnerLogger(true), true);
-t.assert(result.includes(AnalysisKind.CodeScanning));
-t.assert(result.includes(AnalysisKind.CodeQuality));
-});
-
-test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
-const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-optionalInputStub.withArgs("quality-queries").returns("code-quality");
-const result = await getAnalysisKinds(getRunnerLogger(true), true);
-t.assert(result.includes(AnalysisKind.CodeScanning));
-t.assert(result.includes(AnalysisKind.CodeQuality));
-});
-
-test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
-await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
-});
@@ -1,8 +1,4 @@
-import {
-fixCodeQualityCategory,
-getOptionalInput,
-getRequiredInput,
-} from "./actions-util";
+import { fixCodeQualityCategory } from "./actions-util";
 import { Logger } from "./logging";
 import { ConfigurationError } from "./util";

@@ -45,55 +41,6 @@ export async function parseAnalysisKinds(
 );
 }

-// Used to avoid re-parsing the input after we have done it once.
-let cachedAnalysisKinds: AnalysisKind[] | undefined;
-
-/**
-* Initialises the analysis kinds for the analysis based on the `analysis-kinds` input.
-* This function will also use the deprecated `quality-queries` input as an indicator to enable `code-quality`.
-* If the `analysis-kinds` input cannot be parsed, a `ConfigurationError` is thrown.
-*
-* @param logger The logger to use.
-* @param skipCache For testing, whether to ignore the cached values (default: false).
-*
-* @returns The array of enabled analysis kinds.
-* @throws A `ConfigurationError` if the `analysis-kinds` input cannot be parsed.
-*/
-export async function getAnalysisKinds(
-logger: Logger,
-skipCache: boolean = false,
-): Promise<AnalysisKind[]> {
-if (!skipCache && cachedAnalysisKinds !== undefined) {
-return cachedAnalysisKinds;
-}
-
-cachedAnalysisKinds = await parseAnalysisKinds(
-getRequiredInput("analysis-kinds"),
-);
-
-// Warn that `quality-queries` is deprecated if there is an argument for it.
-const qualityQueriesInput = getOptionalInput("quality-queries");
-
-if (qualityQueriesInput !== undefined) {
-logger.warning(
-"The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
-"Use the `analysis-kinds` input to configure different analysis kinds instead.",
-);
-}
-
-// For backwards compatibility, add Code Quality to the enabled analysis kinds
-// if an input to `quality-queries` was specified. We should remove this once
-// `quality-queries` is no longer used.
-if (
-!cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
-qualityQueriesInput !== undefined
-) {
-cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
-}
-
-return cachedAnalysisKinds;
-}
-
 /** The queries to use for Code Quality analyses. */
 export const codeQualityQueries: string[] = ["code-quality"];
@@ -24,9 +24,6 @@ setupTests(test);
 // but the first test would fail.

 test("analyze action with RAM & threads from environment variables", async (t) => {
-// This test frequently times out on Windows with the default timeout, so we bump
-// it a bit to 20s.
-t.timeout(1000 * 20);
 await util.withTmpDir(async (tmpDir) => {
 process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
@@ -78,7 +75,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
 t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
 t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=4992");
 t.assert(runQueriesStub.calledOnce);
-t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
+t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
 t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=4992");
 });
 });
@@ -24,7 +24,6 @@ setupTests(test);
 // but the first test would fail.

 test("analyze action with RAM & threads from action inputs", async (t) => {
-t.timeout(1000 * 20);
 await util.withTmpDir(async (tmpDir) => {
 process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
@@ -76,7 +75,7 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
 t.deepEqual(runFinalizeStub.firstCall.args[1], "--threads=-1");
 t.deepEqual(runFinalizeStub.firstCall.args[2], "--ram=3012");
 t.assert(runQueriesStub.calledOnce);
-t.deepEqual(runQueriesStub.firstCall.args[2], "--threads=-1");
+t.deepEqual(runQueriesStub.firstCall.args[3], "--threads=-1");
 t.deepEqual(runQueriesStub.firstCall.args[1], "--ram=3012");
 });
 });
@@ -52,7 +52,6 @@ import {
 } from "./trap-caching";
 import * as uploadLib from "./upload-lib";
 import { UploadResult } from "./upload-lib";
-import { postProcessAndUploadSarif } from "./upload-sarif";
 import * as util from "./util";

 interface AnalysisStatusReport
@@ -212,9 +211,7 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {

 async function run() {
 const startedAt = new Date();
-let uploadResults:
-| Partial<Record<analyses.AnalysisKind, UploadResult>>
-| undefined = undefined;
+let uploadResult: UploadResult | undefined = undefined;
 let runStats: QueriesStatusReport | undefined = undefined;
 let config: Config | undefined = undefined;
 let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
@@ -324,16 +321,10 @@ async function run() {
 );

 if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
-// Warn if the removed `add-snippets` input is used.
-if (actionsUtil.getOptionalInput("add-snippets") !== undefined) {
-logger.warning(
-"The `add-snippets` input has been removed and no longer has any effect.",
-);
-}
-
 runStats = await runQueries(
 outputDir,
 memory,
+util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")),
 threads,
 diffRangePackDir,
 actionsUtil.getOptionalInput("category"),
@@ -350,67 +341,31 @@ async function run() {
 }
 core.setOutput("db-locations", dbLocations);
 core.setOutput("sarif-output", path.resolve(outputDir));
-const uploadKind = actionsUtil.getUploadValue(
-actionsUtil.getOptionalInput("upload"),
-);
-if (runStats) {
-const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
-const category = actionsUtil.getOptionalInput("category");
-
-if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
-uploadResults = await postProcessAndUploadSarif(
-logger,
-features,
-uploadKind,
-checkoutPath,
+const uploadInput = actionsUtil.getOptionalInput("upload");
+if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
+if (isCodeScanningEnabled(config)) {
+uploadResult = await uploadLib.uploadFiles(
 outputDir,
-category,
-actionsUtil.getOptionalInput("post-processed-sarif-path"),
+actionsUtil.getRequiredInput("checkout_path"),
+actionsUtil.getOptionalInput("category"),
+features,
+logger,
+analyses.CodeScanning,
 );
-} else if (uploadKind === "always") {
-uploadResults = {};
-
-if (isCodeScanningEnabled(config)) {
-uploadResults[analyses.AnalysisKind.CodeScanning] =
-await uploadLib.uploadFiles(
-outputDir,
-checkoutPath,
-category,
-features,
-logger,
-analyses.CodeScanning,
-);
-}
-
-if (isCodeQualityEnabled(config)) {
-uploadResults[analyses.AnalysisKind.CodeQuality] =
-await uploadLib.uploadFiles(
-outputDir,
-checkoutPath,
-category,
-features,
-logger,
-analyses.CodeQuality,
-);
-}
-} else {
-uploadResults = {};
-logger.info("Not uploading results");
+core.setOutput("sarif-id", uploadResult.sarifID);
 }
-
-// Set the SARIF id outputs only if we have results for them, to avoid
-// having keys with empty values in the action output.
-if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
-core.setOutput(
-"sarif-id",
-uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
-);
-}
-if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
-core.setOutput(
-"quality-sarif-id",
-uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
+if (isCodeQualityEnabled(config)) {
+const analysis = analyses.CodeQuality;
+const qualityUploadResult = await uploadLib.uploadFiles(
+outputDir,
+actionsUtil.getRequiredInput("checkout_path"),
+actionsUtil.getOptionalInput("category"),
+features,
+logger,
+analysis,
 );
+core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
 }
 } else {
 logger.info("Not uploading results");
@@ -438,11 +393,14 @@ async function run() {

 // Store dependency cache(s) if dependency caching is enabled.
 if (shouldStoreCache(config.dependencyCachingEnabled)) {
-dependencyCacheResults = await uploadDependencyCaches(
+const minimizeJavaJars = await features.getValue(
+Feature.JavaMinimizeDependencyJars,
 codeql,
-features,
+);
+dependencyCacheResults = await uploadDependencyCaches(
 config,
 logger,
+minimizeJavaJars,
 );
 }

@@ -450,12 +408,12 @@ async function run() {
 if (util.isInTestMode()) {
 logger.debug("In test mode. Waiting for processing is disabled.");
 } else if (
-uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
+uploadResult !== undefined &&
 actionsUtil.getRequiredInput("wait-for-processing") === "true"
 ) {
 await uploadLib.waitForProcessing(
 getRepositoryNwo(),
-uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
+uploadResult.sarifID,
 getActionsLogger(),
 );
 }
@@ -492,16 +450,13 @@ async function run() {
 return;
 }

-if (
-runStats !== undefined &&
-uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
-) {
+if (runStats && uploadResult) {
 await sendStatusReport(
 startedAt,
 config,
 {
 ...runStats,
-...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
+...uploadResult.statusReport,
 },
 undefined,
 trapCacheUploadTime,
@@ -511,7 +466,7 @@ async function run() {
 dependencyCacheResults,
 logger,
 );
-} else if (runStats !== undefined) {
+} else if (runStats) {
 await sendStatusReport(
 startedAt,
 config,
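The analyze-action.ts hunks above swap a single `uploadResult` for a record keyed by analysis kind, so that the `sarif-id` and `quality-sarif-id` outputs are only set when the corresponding upload actually happened. A small self-contained sketch of that shape, with simplified stand-ins for the action's real types:

enum AnalysisKind {
  CodeScanning = "code-scanning",
  CodeQuality = "code-quality",
}

interface UploadResult {
  sarifID: string;
}

// A partial record: only the kinds that were actually uploaded get an entry,
// so outputs can be set conditionally without emitting empty values.
const uploadResults: Partial<Record<AnalysisKind, UploadResult>> = {};

uploadResults[AnalysisKind.CodeScanning] = { sarifID: "example-sarif-id" };

const scanning = uploadResults[AnalysisKind.CodeScanning];
if (scanning !== undefined) {
  // In the real action this would be core.setOutput("sarif-id", ...).
  console.log(`sarif-id: ${scanning.sarifID}`);
}
const quality = uploadResults[AnalysisKind.CodeQuality];
if (quality !== undefined) {
  console.log(`quality-sarif-id: ${quality.sarifID}`);
}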
@@ -4,8 +4,10 @@ import * as path from "path";
 import test from "ava";
 import * as sinon from "sinon";

+import * as actionsUtil from "./actions-util";
 import { CodeQuality, CodeScanning } from "./analyses";
 import {
+exportedForTesting,
 runQueries,
 defaultSuites,
 resolveQuerySuiteAlias,
@@ -37,6 +39,7 @@ test("status report fields", async (t) => {
 setupActionsVars(tmpDir, tmpDir);

 const memoryFlag = "";
+const addSnippetsFlag = "";
 const threadsFlag = "";
 sinon.stub(uploadLib, "validateSarifFileSchema");

@@ -102,6 +105,7 @@ test("status report fields", async (t) => {
 const statusReport = await runQueries(
 tmpDir,
 memoryFlag,
+addSnippetsFlag,
 threadsFlag,
 undefined,
 undefined,
@@ -127,6 +131,204 @@ test("status report fields", async (t) => {
 });
 });

+function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
+sinon
+.stub(actionsUtil, "getRequiredInput")
+.withArgs("checkout_path")
+.returns("/checkout/path");
+return exportedForTesting.getDiffRanges(
+{
+filename: "test.txt",
+changes,
+patch: patch?.join("\n"),
+},
+getRunnerLogger(true),
+);
+}
+
+test("getDiffRanges: file unchanged", async (t) => {
+const diffRanges = runGetDiffRanges(0, undefined);
+t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: file diff too large", async (t) => {
+const diffRanges = runGetDiffRanges(1000000, undefined);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 0,
+endLine: 0,
+},
+]);
+});
+
+test("getDiffRanges: diff thunk with single addition range", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,6 +50,8 @@",
+" a",
+" b",
+" c",
+"+1",
+"+2",
+" d",
+" e",
+" f",
+]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 53,
+endLine: 54,
+},
+]);
+});
+
+test("getDiffRanges: diff thunk with single deletion range", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,8 +50,6 @@",
+" a",
+" b",
+" c",
+"-1",
+"-2",
+" d",
+" e",
+" f",
+]);
+t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: diff thunk with single update range", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,7 +50,7 @@",
+" a",
+" b",
+" c",
+"-1",
+"+2",
+" d",
+" e",
+" f",
+]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 53,
+endLine: 53,
+},
+]);
+});
+
+test("getDiffRanges: diff thunk with addition ranges", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,7 +50,9 @@",
+" a",
+" b",
+" c",
+"+1",
+" c",
+"+2",
+" d",
+" e",
+" f",
+]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 53,
+endLine: 53,
+},
+{
+path: "/checkout/path/test.txt",
+startLine: 55,
+endLine: 55,
+},
+]);
+});
+
+test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,7 +50,7 @@",
+" a",
+" b",
+" c",
+"-1",
+" d",
+"-2",
+"+3",
+" e",
+" f",
+"+4",
+"+5",
+" g",
+" h",
+" i",
+]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 54,
+endLine: 54,
+},
+{
+path: "/checkout/path/test.txt",
+startLine: 57,
+endLine: 58,
+},
+]);
+});
+
+test("getDiffRanges: multiple diff thunks", async (t) => {
+const diffRanges = runGetDiffRanges(2, [
+"@@ -30,6 +50,8 @@",
+" a",
+" b",
+" c",
+"+1",
+"+2",
+" d",
+" e",
+" f",
+"@@ -130,6 +150,8 @@",
+" a",
+" b",
+" c",
+"+1",
+"+2",
+" d",
+" e",
+" f",
+]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 53,
+endLine: 54,
+},
+{
+path: "/checkout/path/test.txt",
+startLine: 153,
+endLine: 154,
+},
+]);
+});
+
+test("getDiffRanges: no diff context lines", async (t) => {
+const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
+t.deepEqual(diffRanges, [
+{
+path: "/checkout/path/test.txt",
+startLine: 50,
+endLine: 51,
+},
+]);
+});
+
+test("getDiffRanges: malformed thunk header", async (t) => {
+const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
+t.deepEqual(diffRanges, undefined);
+});
+
 test("resolveQuerySuiteAlias", (t) => {
 // default query suite names should resolve to something language-specific ending in `.qls`.
 for (const suite of defaultSuites) {
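The `getDiffRanges` tests above pin down how added-line ranges are derived from a file's unified-diff patch text, including the malformed-header case. A minimal sketch of that kind of hunk-header parsing, consistent with the expectations encoded in the tests but not the action's actual implementation (the regex, helper name, and return shape are illustrative, and the changes-count and missing-patch cases are not covered):

interface DiffRange {
  startLine: number;
  endLine: number;
}

// Parse a unified-diff patch and collect the new-file line ranges that were
// added or replaced. Returns undefined if a hunk header cannot be parsed.
function addedRanges(patch: string): DiffRange[] | undefined {
  const ranges: DiffRange[] = [];
  let newLine = 0; // current line number in the new file
  for (const line of patch.split("\n")) {
    const header = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
    if (header) {
      newLine = parseInt(header[1], 10);
      continue;
    }
    if (newLine === 0) {
      return undefined; // content before a valid hunk header: malformed patch
    }
    if (line.startsWith("+")) {
      const last = ranges[ranges.length - 1];
      if (last && last.endLine === newLine - 1) {
        last.endLine = newLine; // extend a contiguous added range
      } else {
        ranges.push({ startLine: newLine, endLine: newLine });
      }
      newLine++;
    } else if (!line.startsWith("-")) {
      newLine++; // context lines advance the new-file counter; deletions do not
    }
  }
  return ranges;
}

// For the "single addition range" patch from the tests this yields 53-54, and
// for the "mixed ranges" patch it yields 54-54 and 57-58.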
286 src/analyze.ts
@@ -3,10 +3,16 @@ import * as path from "path";
 import { performance } from "perf_hooks";

 import * as io from "@actions/io";
+import * as del from "del";
 import * as yaml from "js-yaml";

-import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
+import {
+getRequiredInput,
+getTemporaryDirectory,
+PullRequestBranches,
+} from "./actions-util";
 import * as analyses from "./analyses";
+import { getApiClient } from "./api-client";
 import { setupCppAutobuild } from "./autobuild";
 import { type CodeQL } from "./codeql";
 import * as configUtils from "./config-utils";
@@ -15,13 +21,13 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics";
 import {
 DiffThunkRange,
 writeDiffRangesJsonFile,
-getPullRequestEditedDiffRanges,
 } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { FeatureEnablement, Feature } from "./feature-flags";
 import { KnownLanguage, Language } from "./languages";
 import { Logger, withGroupAsync } from "./logging";
 import { OverlayDatabaseMode } from "./overlay-database-utils";
+import { getRepositoryNwoFromEnv } from "./repository";
 import { DatabaseCreationTimings, EventReport } from "./status-report";
 import { endTracingForCluster } from "./tracer-config";
 import * as util from "./util";
@@ -38,26 +44,89 @@ export class CodeQLAnalysisError extends Error {
 }
 }

-type KnownLanguageKey = keyof typeof KnownLanguage;
-
-type RunQueriesDurationStatusReport = {
+export interface QueriesStatusReport {
 /**
-* Time taken in ms to run queries for the language (or undefined if this language was not analyzed).
+* Time taken in ms to run queries for actions (or undefined if this language was not analyzed).
 *
 * The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
 * taken to run _all_ the queries.
 */
-[L in KnownLanguageKey as `analyze_builtin_queries_${L}_duration_ms`]?: number;
-};
-
-type InterpretResultsDurationStatusReport = {
-/** Time taken in ms to interpret results for the language (or undefined if this language was not analyzed). */
-[L in KnownLanguageKey as `interpret_results_${L}_duration_ms`]?: number;
-};
-
-export interface QueriesStatusReport
-extends RunQueriesDurationStatusReport,
-InterpretResultsDurationStatusReport {
+analyze_builtin_queries_actions_duration_ms?: number;
+/**
+* Time taken in ms to run queries for cpp (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_cpp_duration_ms?: number;
+/**
+* Time taken in ms to run queries for csharp (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_csharp_duration_ms?: number;
+/**
+* Time taken in ms to run queries for go (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_go_duration_ms?: number;
+/**
+* Time taken in ms to run queries for java (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_java_duration_ms?: number;
+/**
+* Time taken in ms to run queries for javascript (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_javascript_duration_ms?: number;
+/**
+* Time taken in ms to run queries for python (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_python_duration_ms?: number;
+/**
+* Time taken in ms to run queries for ruby (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_ruby_duration_ms?: number;
+/** Time taken in ms to run queries for swift (or undefined if this language was not analyzed).
+*
+* The "builtin" designation is now outdated with the move to CLI config parsing: this is the time
+* taken to run _all_ the queries.
+*/
+analyze_builtin_queries_swift_duration_ms?: number;
+/** Time taken in ms to interpret results for actions (or undefined if this language was not analyzed). */
+interpret_results_actions_duration_ms?: number;
+/** Time taken in ms to interpret results for cpp (or undefined if this language was not analyzed). */
+interpret_results_cpp_duration_ms?: number;
+/** Time taken in ms to interpret results for csharp (or undefined if this language was not analyzed). */
+interpret_results_csharp_duration_ms?: number;
+/** Time taken in ms to interpret results for go (or undefined if this language was not analyzed). */
+interpret_results_go_duration_ms?: number;
+/** Time taken in ms to interpret results for java (or undefined if this language was not analyzed). */
+interpret_results_java_duration_ms?: number;
+/** Time taken in ms to interpret results for javascript (or undefined if this language was not analyzed). */
+interpret_results_javascript_duration_ms?: number;
+/** Time taken in ms to interpret results for python (or undefined if this language was not analyzed). */
+interpret_results_python_duration_ms?: number;
+/** Time taken in ms to interpret results for ruby (or undefined if this language was not analyzed). */
+interpret_results_ruby_duration_ms?: number;
+/** Time taken in ms to interpret results for swift (or undefined if this language was not analyzed). */
+interpret_results_swift_duration_ms?: number;
 /**
 * Whether the analysis is diff-informed (in the sense that the action generates a diff-range data
 * extension for the analysis, regardless of whether the data extension is actually used by queries).
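The removed types above generate the per-language duration fields with a TypeScript mapped type that remaps keys via a template literal, instead of spelling out one property per language. A standalone sketch of the same technique, using a small example enum in place of the action's `KnownLanguage`:

// A small stand-in for the action's KnownLanguage enum.
enum ExampleLanguage {
  javascript = "javascript",
  python = "python",
}

type ExampleLanguageKey = keyof typeof ExampleLanguage;

// Key remapping with `as` generates one optional field per enum key, e.g.
// `analyze_builtin_queries_javascript_duration_ms?: number`.
type DurationReport = {
  [L in ExampleLanguageKey as `analyze_builtin_queries_${L}_duration_ms`]?: number;
};

const report: DurationReport = {
  analyze_builtin_queries_javascript_duration_ms: 1234,
};
console.log(report);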
@@ -244,6 +313,185 @@ export async function setupDiffInformedQueryRun(
 );
 }

+/**
+* Return the file line ranges that were added or modified in the pull request.
+*
+* @param branches The base and head branches of the pull request.
+* @param logger
+* @returns An array of tuples, where each tuple contains the absolute path of a
+* file, the start line and the end line (both 1-based and inclusive) of an
+* added or modified range in that file. Returns `undefined` if the action was
+* not triggered by a pull request or if there was an error.
+*/
+async function getPullRequestEditedDiffRanges(
+branches: PullRequestBranches,
+logger: Logger,
+): Promise<DiffThunkRange[] | undefined> {
+const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
+if (fileDiffs === undefined) {
+return undefined;
+}
+if (fileDiffs.length >= 300) {
+// The "compare two commits" API returns a maximum of 300 changed files. If
+// we see that many changed files, it is possible that there could be more,
+// with the rest being truncated. In this case, we should not attempt to
+// compute the diff ranges, as the result would be incomplete.
+logger.warning(
+`Cannot retrieve the full diff because there are too many ` +
+`(${fileDiffs.length}) changed files in the pull request.`,
+);
+return undefined;
+}
+const results: DiffThunkRange[] = [];
+for (const filediff of fileDiffs) {
+const diffRanges = getDiffRanges(filediff, logger);
+if (diffRanges === undefined) {
+return undefined;
+}
+results.push(...diffRanges);
+}
+return results;
+}

+/**
+* This interface is an abbreviated version of the file diff object returned by
+* the GitHub API.
+*/
+interface FileDiff {
+filename: string;
+changes: number;
+// A patch may be absent if the file is binary, if the file diff is too large,
+// or if the file is unchanged.
+patch?: string | undefined;
+}

+async function getFileDiffsWithBasehead(
+branches: PullRequestBranches,
+logger: Logger,
+): Promise<FileDiff[] | undefined> {
+// Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
+// to GITHUB_REPOSITORY.
+const repositoryNwo = getRepositoryNwoFromEnv(
+"CODE_SCANNING_REPOSITORY",
+"GITHUB_REPOSITORY",
+);
+const basehead = `${branches.base}...${branches.head}`;
+try {
+const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
+{
+owner: repositoryNwo.owner,
+repo: repositoryNwo.repo,
+basehead,
+per_page: 1,
+},
+);
+logger.debug(
+`Response from compareCommitsWithBasehead(${basehead}):` +
+`\n${JSON.stringify(response, null, 2)}`,
+);
+return response.data.files;
+} catch (error: any) {
+if (error.status) {
+logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
+logger.debug(
+`Error running compareCommitsWithBasehead(${basehead}):` +
+`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
+`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
+);
+return undefined;
+} else {
+throw error;
+}
+}
+}

+function getDiffRanges(
+fileDiff: FileDiff,
+logger: Logger,
+): DiffThunkRange[] | undefined {
+// Diff-informed queries expect the file path to be absolute. CodeQL always
+// uses forward slashes as the path separator, so on Windows we need to
+// replace any backslashes with forward slashes.
+const filename = path
+.join(getRequiredInput("checkout_path"), fileDiff.filename)
+.replaceAll(path.sep, "/");

+if (fileDiff.patch === undefined) {
+if (fileDiff.changes === 0) {
+// There are situations where a changed file legitimately has no diff.
+// For example, the file may be a binary file, or that the file may have
+// been renamed with no changes to its contents. In these cases, the
+// file would be reported as having 0 changes, and we can return an empty
+// array to indicate no diff range in this file.
+return [];
+}
+// If a file is reported to have nonzero changes but no patch, that may be
+// due to the file diff being too large. In this case, we should fall back
+// to a special diff range that covers the entire file.
+return [
+{
+path: filename,
+startLine: 0,
+endLine: 0,
+},
+];
+}

+// The 1-based file line number of the current line
+let currentLine = 0;
+// The 1-based file line number that starts the current range of added lines
+let additionRangeStartLine: number | undefined = undefined;
+const diffRanges: DiffThunkRange[] = [];

+const diffLines = fileDiff.patch.split("\n");
+// Adding a fake context line at the end ensures that the following loop will
+// always terminate the last range of added lines.
+diffLines.push(" ");

+for (const diffLine of diffLines) {
+if (diffLine.startsWith("-")) {
+// Ignore deletions completely -- we do not even want to consider them when
+// calculating consecutive ranges of added lines.
+continue;
+}
+if (diffLine.startsWith("+")) {
+if (additionRangeStartLine === undefined) {
+additionRangeStartLine = currentLine;
+}
+currentLine++;
+continue;
+}
+if (additionRangeStartLine !== undefined) {
+// Any line that does not start with a "+" or "-" terminates the current
+// range of added lines.
+diffRanges.push({
+path: filename,
+startLine: additionRangeStartLine,
+endLine: currentLine - 1,
+});
+additionRangeStartLine = undefined;
+}
+if (diffLine.startsWith("@@ ")) {
+// A new hunk header line resets the current line number.
+const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
+if (match === null) {
+logger.warning(
+`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
+);
+return undefined;
+}
+currentLine = parseInt(match[1], 10);
+continue;
+}
+if (diffLine.startsWith(" ")) {
+// An unchanged context line advances the current line number.
+currentLine++;
+continue;
+}
+}
+return diffRanges;
+}

 /**
 * Create an extension pack in the temporary directory that contains the file
 * line ranges that were added or modified in the pull request.
@@ -373,6 +621,7 @@ export function addSarifExtension(
 export async function runQueries(
 sarifFolder: string,
 memoryFlag: string,
+addSnippetsFlag: string,
 threadsFlag: string,
 diffRangePackDir: string | undefined,
 automationDetailsId: string | undefined,
@@ -562,6 +811,7 @@ export async function runQueries(
 databasePath,
 queries,
 sarifFile,
+addSnippetsFlag,
 threadsFlag,
 enableDebugLogging ? "-vv" : "-v",
 sarifRunPropertyFlag,
@@ -605,7 +855,7 @@ export async function runFinalize(
 logger: Logger,
 ): Promise<DatabaseCreationTimings> {
 try {
-await fs.promises.rm(outputDir, { force: true, recursive: true });
+await del.deleteAsync(outputDir, { force: true });
 } catch (error: any) {
 if (error?.code !== "ENOENT") {
 throw error;
@@ -672,3 +922,7 @@ export async function warnIfGoInstalledAfterInit(
 }
 }
 }

+export const exportedForTesting = {
+getDiffRanges,
+};
@@ -169,32 +169,4 @@ test("wrapApiConfigurationError correctly wraps specific configuration errors",
 res,
 new util.ConfigurationError("Resource not accessible by integration"),
 );

-// Enablement errors.
-const enablementErrorMessages = [
-"Code Security must be enabled for this repository to use code scanning",
-"Advanced Security must be enabled for this repository to use code scanning",
-"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
-];
-const transforms = [
-(msg: string) => msg,
-(msg: string) => msg.toLowerCase(),
-(msg: string) => msg.toLocaleUpperCase(),
-];

-for (const enablementErrorMessage of enablementErrorMessages) {
-for (const transform of transforms) {
-const enablementError = new util.HTTPError(
-transform(enablementErrorMessage),
-403,
-);
-res = api.wrapApiConfigurationError(enablementError);
-t.deepEqual(
-res,
-new util.ConfigurationError(
-api.getFeatureEnablementError(enablementError.message),
-),
-);
-}
-}
 });
@@ -1,17 +1,18 @@
 import * as core from "@actions/core";
 import * as githubUtils from "@actions/github/lib/utils";
 import * as retry from "@octokit/plugin-retry";
+import consoleLogLevel from "console-log-level";

 import { getActionVersion, getRequiredInput } from "./actions-util";
 import { Logger } from "./logging";
 import { getRepositoryNwo, RepositoryNwo } from "./repository";
 import {
-asHTTPError,
 ConfigurationError,
 getRequiredEnvParam,
 GITHUB_DOTCOM_URL,
 GitHubVariant,
 GitHubVersion,
+isHTTPError,
 parseGitHubUrl,
 parseMatrixInput,
 } from "./util";
@@ -49,12 +50,7 @@ function createApiClientWithDetails(
 githubUtils.getOctokitOptions(auth, {
 baseUrl: apiDetails.apiURL,
 userAgent: `CodeQL-Action/${getActionVersion()}`,
-log: {
+log: consoleLogLevel({ level: "debug" }),
-debug: core.debug,
-info: core.info,
-warn: core.warning,
-error: core.error,
-},
 }),
 );
 }
@@ -283,49 +279,23 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
 });
 }

-function isEnablementError(msg: string) {
-return [
-/Code Security must be enabled/i,
-/Advanced Security must be enabled/i,
-/Code Scanning is not enabled/i,
-].some((pattern) => pattern.test(msg));
-}

-// TODO: Move to `error-messages.ts` after refactoring import order to avoid cycle
-// since `error-messages.ts` currently depends on this file.
-export function getFeatureEnablementError(message: string): string {
-return `Please verify that the necessary features are enabled: ${message}`;
-}

 export function wrapApiConfigurationError(e: unknown) {
-const httpError = asHTTPError(e);
+if (isHTTPError(e)) {
-if (httpError !== undefined) {
 if (
-[
+e.message.includes("API rate limit exceeded for installation") ||
-/API rate limit exceeded/,
+e.message.includes("commit not found") ||
-/commit not found/,
+e.message.includes("Resource not accessible by integration") ||
-/Resource not accessible by integration/,
+/ref .* not found in this repository/.test(e.message)
-/ref .* not found in this repository/,
-].some((pattern) => pattern.test(httpError.message))
 ) {
-return new ConfigurationError(httpError.message);
+return new ConfigurationError(e.message);
-}
+} else if (
-if (
+e.message.includes("Bad credentials") ||
-httpError.message.includes("Bad credentials") ||
+e.message.includes("Not Found")
-httpError.message.includes("Not Found")
 ) {
 return new ConfigurationError(
 "Please check that your token is valid and has the required permissions: contents: read, security-events: write",
 );
 }
-if (httpError.status === 403 && isEnablementError(httpError.message)) {
-return new ConfigurationError(
-getFeatureEnablementError(httpError.message),
-);
-}
-if (httpError.status === 429) {
-return new ConfigurationError("API rate limit exceeded");
-}
 }
 return e;
 }
@@ -1,5 +1,3 @@
-import * as crypto from "crypto";

 import * as core from "@actions/core";

 import { getOptionalInput, isDefaultSetup } from "./actions-util";
@@ -73,33 +71,6 @@ export function getCachingKind(input: string | undefined): CachingKind {
 }
 }

-// The length to which `createCacheKeyHash` truncates hash strings.
-export const cacheKeyHashLength = 16;

-/**
-* Creates a SHA-256 hash of the cache key components to ensure uniqueness
-* while keeping the cache key length manageable.
-*
-* @param components Object containing all components that should influence cache key uniqueness
-* @returns A short SHA-256 hash (first 16 characters) of the components
-*/
-export function createCacheKeyHash(components: Record<string, any>): string {
-// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
-//
-// "Properties are visited using the same algorithm as Object.keys(), which
-// has a well-defined order and is stable across implementations. For example,
-// JSON.stringify on the same object will always produce the same string, and
-// JSON.parse(JSON.stringify(obj)) would produce an object with the same key
-// ordering as the original (assuming the object is completely
-// JSON-serializable)."
-const componentsJson = JSON.stringify(components);
-return crypto
-.createHash("sha256")
-.update(componentsJson)
-.digest("hex")
-.substring(0, cacheKeyHashLength);
-}

 /** Determines whether dependency caching is enabled. */
 export function getDependencyCachingEnabled(): CachingKind {
 // If the workflow specified something always respect that
@@ -310,20 +310,6 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
 t.true(wrappedError instanceof ConfigurationError);
 });

-test("wrapCliConfigurationError - unknown query file", (t) => {
-const commandError = new CommandInvocationError(
-"codeql",
-["database", "init"],
-2,
-"my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
-);
-const cliError = new CliError(commandError);

-const wrappedError = wrapCliConfigurationError(cliError);

-t.true(wrappedError instanceof ConfigurationError);
-});

 test("wrapCliConfigurationError - pack missing auth", (t) => {
 const commandError = new CommandInvocationError(
 "codeql",
@@ -264,9 +264,6 @@ export const cliErrorsConfig: Record<
 new RegExp(
 "Query pack .* cannot be found\\. Check the spelling of the pack\\.",
 ),
-new RegExp(
-"is not a .ql file, .qls file, a directory, or a query pack specification.",
-),
 ],
 },
 [CliConfigErrorCategory.PackMissingAuth]: {
@@ -5,6 +5,7 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
 import * as io from "@actions/io";
 import * as toolcache from "@actions/tool-cache";
 import test, { ExecutionContext } from "ava";
+import * as del from "del";
 import * as yaml from "js-yaml";
 import nock from "nock";
 import * as sinon from "sinon";
@@ -35,6 +36,7 @@ import {
 createTestConfig,
 } from "./testing-utils";
 import { ToolsDownloadStatusReport } from "./tools-download";
+import { ToolsFeature } from "./tools-features";
 import * as util from "./util";
 import { initializeEnvironment } from "./util";

@@ -556,7 +558,7 @@ const injectedConfigMacro = test.macro({
 const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
 t.deepEqual(augmentedConfig, expectedConfig);

-await fs.promises.rm(configFile, { force: true });
+await del.deleteAsync(configFile, { force: true });
 });
 },

@@ -868,6 +870,84 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
 });
 });

+const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
+{
+codeqlVersion: makeVersionInfo("2.15.0", {
+[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
+}),
+githubVersion: {
+type: util.GitHubVariant.DOTCOM,
+},
+flagPassed: false,
+negativeFlagPassed: false,
+},
+{
+codeqlVersion: makeVersionInfo("2.15.0"),
+githubVersion: {
+type: util.GitHubVariant.DOTCOM,
+},
+flagPassed: true,
+negativeFlagPassed: false,
+},
+{
+codeqlVersion: makeVersionInfo("2.15.0"),
+githubVersion: {
+type: util.GitHubVariant.GHES,
+version: "3.10.0",
+},
+flagPassed: true,
+negativeFlagPassed: false,
+},
+];

+for (const {
+codeqlVersion,
+flagPassed,
+githubVersion,
+negativeFlagPassed,
+} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
+test(`database interpret-results passes ${
+flagPassed
+? "--new-analysis-summary"
+: negativeFlagPassed
+? "--no-new-analysis-summary"
+: "nothing"
+} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
+util.GitHubVariant[githubVersion.type]
+} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
+const runnerConstructorStub = stubToolRunnerConstructor();
+const codeqlObject = await codeql.getCodeQLForTesting();
+sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
+// io throws because of the test CodeQL object.
+sinon.stub(io, "which").resolves("");
+await codeqlObject.databaseInterpretResults(
+"",
+[],
+"",
+"",
+"",
+"-v",
+undefined,
+"",
+Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
+createFeatures([]),
+);
+const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
+t.is(
+actualArgs.includes("--new-analysis-summary"),
+flagPassed,
+`--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
+);
+t.is(
+actualArgs.includes("--no-new-analysis-summary"),
+negativeFlagPassed,
+`--no-new-analysis-summary should${
+negativeFlagPassed ? "" : "n't"
+} be passed`,
+);
+});
+}

 test("runTool summarizes several fatal errors", async (t) => {
 const heapError =
 "A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
@@ -1045,7 +1125,7 @@ test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OP
 );
 t.truthy(configArg, "Should have injected a codescanning config");
 const configFile = configArg!.split("=")[1];
-await fs.promises.rm(configFile, { force: true });
+await del.deleteAsync(configFile, { force: true });
 });

 export function stubToolRunnerConstructor(
@@ -3,6 +3,7 @@ import * as path from "path";

 import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
+import { RequestError } from "@octokit/request-error";
 import * as yaml from "js-yaml";

 import {
@@ -167,6 +168,7 @@ export interface CodeQL {
 databasePath: string,
 querySuitePaths: string[] | undefined,
 sarifFile: string,
+addSnippetsFlag: string,
 threadsFlag: string,
 verbosityFlag: string | undefined,
 sarifRunPropertyFlag: string | undefined,
@@ -266,7 +268,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
 * The version flags below can be used to conditionally enable certain features
 * on versions newer than this.
 */
-const CODEQL_MINIMUM_VERSION = "2.17.6";
+const CODEQL_MINIMUM_VERSION = "2.16.6";

 /**
 * This version will shortly become the oldest version of CodeQL that the Action will run with.
@@ -369,11 +371,11 @@ export async function setupCodeQL(
 toolsVersion,
 zstdAvailability,
 };
-} catch (rawError) {
+} catch (e) {
-const e = api.wrapApiConfigurationError(rawError);
 const ErrorClass =
 e instanceof util.ConfigurationError ||
-(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
+(e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
+(e instanceof RequestError && e.status === 429) // rate limited
 ? util.ConfigurationError
 : Error;

@@ -816,6 +818,7 @@ export async function getCodeQLForCmd(
 databasePath: string,
 querySuitePaths: string[] | undefined,
 sarifFile: string,
+addSnippetsFlag: string,
 threadsFlag: string,
 verbosityFlag: string,
 sarifRunPropertyFlag: string | undefined,
@@ -834,6 +837,7 @@ export async function getCodeQLForCmd(
 "--format=sarif-latest",
 verbosityFlag,
 `--output=${sarifFile}`,
+addSnippetsFlag,
 "--print-diagnostics-summary",
 "--print-metrics-summary",
 "--sarif-add-baseline-file-info",
@@ -857,6 +861,14 @@ export async function getCodeQLForCmd(
 } else {
 codeqlArgs.push("--no-sarif-include-diagnostics");
 }
+if (
+!isSupportedToolsFeature(
+await this.getVersion(),
+ToolsFeature.AnalysisSummaryV2IsDefault,
+)
+) {
+codeqlArgs.push("--new-analysis-summary");
+}
 codeqlArgs.push(databasePath);
 if (querySuitePaths) {
 codeqlArgs.push(...querySuitePaths);
@@ -1071,11 +1083,8 @@ export async function getCodeQLForCmd(
 /**
 * Gets the options for `path` of `options` as an array of extra option strings.
 *
-* @param paths The CLI command components to get extra options for.
+* @param ignoringOptions Options that should be ignored, for example because they have already
-* @param args Additional arguments for this function.
+* been passed and it is an error to pass them more than once.
-* @param args.ignoringOptions
-* Options that should be ignored, for example because they have already
-* been passed and it is an error to pass them more than once.
 */
 function getExtraOptionsFromEnv(
 paths: string[],
@@ -1157,9 +1166,8 @@ async function runCli(
 /**
 * Writes the code scanning configuration that is to be used by the CLI.
 *
-* @param config The CodeQL Action state to write.
+* @param codeql The CodeQL object to use.
-* @param logger The logger to use.
+* @param config The CodeQL Action state to use.
-*
 * @returns The path to the generated user configuration file.
 */
 async function writeCodeScanningConfigFile(
@@ -49,9 +49,10 @@ function createTestInitConfigInputs(
 return Object.assign(
 {},
 {
-analysisKinds: [AnalysisKind.CodeScanning],
+analysisKindsInput: "code-scanning",
 languagesInput: undefined,
 queriesInput: undefined,
+qualityQueriesInput: undefined,
 packsInput: undefined,
 configFile: undefined,
 dbLocation: undefined,
@@ -148,7 +149,6 @@ test("load empty config", async (t) => {
 });

 const config = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput: languages,
 repository: { owner: "github", repo: "example" },
@@ -188,9 +188,8 @@ test("load code quality config", async (t) => {
 });

 const config = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
-analysisKinds: [AnalysisKind.CodeQuality],
+analysisKindsInput: "code-quality",
 languagesInput: languages,
 repository: { owner: "github", repo: "example" },
 tempDir,
@@ -273,9 +272,8 @@ test("initActionState doesn't throw if there are queries configured in the repos

 await t.notThrowsAsync(async () => {
 const config = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
-analysisKinds: [AnalysisKind.CodeQuality],
+analysisKindsInput: "code-quality",
 languagesInput: languages,
 repository: { owner: "github", repo: "example" },
 tempDir,
@@ -312,7 +310,6 @@ test("loading a saved config produces the same config", async (t) => {
 t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);

 const config1 = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput: "javascript,python",
 tempDir,
@@ -364,7 +361,6 @@ test("loading config with version mismatch throws", async (t) => {
 .returns("does-not-exist");

 const config = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput: "javascript,python",
 tempDir,
@@ -393,7 +389,6 @@ test("load input outside of workspace", async (t) => {
 return await withTmpDir(async (tempDir) => {
 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 configFile: "../input",
 tempDir,
@@ -421,7 +416,6 @@ test("load non-local input with invalid repo syntax", async (t) => {

 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 configFile,
 tempDir,
@@ -450,7 +444,6 @@ test("load non-existent input", async (t) => {

 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput,
 configFile,
@@ -534,7 +527,6 @@ test("load non-empty input", async (t) => {
 const configFilePath = createConfigFile(inputFileContents, tempDir);

 const actualConfig = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput,
 buildModeInput: "none",
@@ -591,7 +583,6 @@ test("Using config input and file together, config input should be used.", async
 const languagesInput = "javascript";

 const config = await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput,
 configFile: configFilePath,
@@ -642,7 +633,6 @@ test("API client used when reading remote config", async (t) => {
 const languagesInput = "javascript";

 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput,
 configFile,
@@ -663,7 +653,6 @@ test("Remote config handles the case where a directory is provided", async (t) =
 const repoReference = "octo-org/codeql-config/config.yaml@main";
 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 configFile: repoReference,
 tempDir,
@@ -692,7 +681,6 @@ test("Invalid format of remote config handled correctly", async (t) => {
 const repoReference = "octo-org/codeql-config/config.yaml@main";
 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 configFile: repoReference,
 tempDir,
@@ -722,7 +710,6 @@ test("No detected languages", async (t) => {

 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 tempDir,
 codeql,
@@ -745,7 +732,6 @@ test("Unknown languages", async (t) => {

 try {
 await configUtils.initConfig(
-createFeatures([]),
 createTestInitConfigInputs({
 languagesInput,
 tempDir,
@@ -873,62 +859,71 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
 expectedLanguages: ["javascript"],
 },
 ].forEach((args) => {
-test(`getLanguages: ${args.name}`, async (t) => {
+for (const resolveSupportedLanguagesUsingCli of [true, false]) {
-const mockRequest = mockLanguagesInRepo(args.languagesInRepository);
+test(`getLanguages${resolveSupportedLanguagesUsingCli ? " (supported languages via CLI)" : ""}: ${args.name}`, async (t) => {
-const stubExtractorEntry = {
+const features = createFeatures(
-extractor_root: "",
+resolveSupportedLanguagesUsingCli
-};
+? [Feature.ResolveSupportedLanguagesUsingCli]
-const codeQL = createStubCodeQL({
+: [],
-betterResolveLanguages: (options) =>
+);
-Promise.resolve({
+const mockRequest = mockLanguagesInRepo(args.languagesInRepository);
-aliases: {
+const stubExtractorEntry = {
-"c#": KnownLanguage.csharp,
+extractor_root: "",
-c: KnownLanguage.cpp,
+};
-kotlin: KnownLanguage.java,
+const codeQL = createStubCodeQL({
-typescript: KnownLanguage.javascript,
+betterResolveLanguages: (options) =>
-},
+Promise.resolve({
-extractors: {
+aliases: {
-cpp: [stubExtractorEntry],
+"c#": KnownLanguage.csharp,
-csharp: [stubExtractorEntry],
+c: KnownLanguage.cpp,
-java: [stubExtractorEntry],
+kotlin: KnownLanguage.java,
-javascript: [stubExtractorEntry],
+typescript: KnownLanguage.javascript,
-python: [stubExtractorEntry],
+},
-...(options?.filterToLanguagesWithQueries
+extractors: {
-? {}
+cpp: [stubExtractorEntry],
-: {
+csharp: [stubExtractorEntry],
-html: [stubExtractorEntry],
+java: [stubExtractorEntry],
-}),
+javascript: [stubExtractorEntry],
-},
+python: [stubExtractorEntry],
-}),
+...(options?.filterToLanguagesWithQueries
+? {}
+: {
+html: [stubExtractorEntry],
+}),
+},
+}),
+});

+if (args.expectedLanguages) {
+// happy path
+const actualLanguages = await configUtils.getLanguages(
+codeQL,
+args.languagesInput,
+mockRepositoryNwo,
+".",
+features,
+mockLogger,
+);

+t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
+} else {
+// there is an error
+await t.throwsAsync(
+async () =>
+await configUtils.getLanguages(
+codeQL,
+args.languagesInput,
+mockRepositoryNwo,
+".",
+features,
+mockLogger,
+),
+{ message: args.expectedError },
+);
+}
+t.deepEqual(mockRequest.called, args.expectedApiCall);
 });
+}
-if (args.expectedLanguages) {
-// happy path
-const actualLanguages = await configUtils.getLanguages(
-codeQL,
-args.languagesInput,
-mockRepositoryNwo,
-".",
-mockLogger,
-);

-t.deepEqual(actualLanguages.sort(), args.expectedLanguages.sort());
-} else {
-// there is an error
-await t.throwsAsync(
-async () =>
-await configUtils.getLanguages(
-codeQL,
-args.languagesInput,
-mockRepositoryNwo,
-".",
-mockLogger,
-),
-{ message: args.expectedError },
-);
-}
-t.deepEqual(mockRequest.called, args.expectedApiCall);
-});
 });

 for (const { displayName, language, feature } of [
@@ -11,6 +11,7 @@ import {
 CodeQuality,
 codeQualityQueries,
 CodeScanning,
+parseAnalysisKinds,
 } from "./analyses";
 import * as api from "./api-client";
 import { CachingKind, getCachingKind } from "./caching-utils";
@@ -19,7 +20,6 @@ import {
 calculateAugmentation,
 ExcludeQueryFilter,
 generateCodeScanningConfig,
-parseUserConfig,
 UserConfig,
 } from "./config/db-config";
 import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
@@ -34,7 +34,6 @@ import {
 OverlayDatabaseMode,
 } from "./overlay-database-utils";
 import { RepositoryNwo } from "./repository";
-import { ToolsFeature } from "./tools-features";
 import { downloadTrapCaches } from "./trap-caching";
 import {
 GitHubVersion,
@@ -178,10 +177,12 @@ export interface Config {

 export async function getSupportedLanguageMap(
 codeql: CodeQL,
+features: FeatureEnablement,
 logger: Logger,
 ): Promise<Record<string, string>> {
-const resolveSupportedLanguagesUsingCli = await codeql.supportsFeature(
+const resolveSupportedLanguagesUsingCli = await features.getValue(
-ToolsFeature.BuiltinExtractorsSpecifyDefaultQueries,
+Feature.ResolveSupportedLanguagesUsingCli,
+codeql,
 );
 const resolveResult = await codeql.betterResolveLanguages({
 filterToLanguagesWithQueries: resolveSupportedLanguagesUsingCli,
@@ -282,6 +283,7 @@ export async function getLanguages(
 languagesInput: string | undefined,
 repository: RepositoryNwo,
 sourceRoot: string,
+features: FeatureEnablement,
 logger: Logger,
 ): Promise<Language[]> {
 // Obtain languages without filtering them.
@@ -292,7 +294,7 @@ export async function getLanguages(
 logger,
 );

-const languageMap = await getSupportedLanguageMap(codeql, logger);
+const languageMap = await getSupportedLanguageMap(codeql, features, logger);
 const languagesSet = new Set<Language>();
 const unknownLanguages: string[] = [];

@@ -371,8 +373,10 @@ export async function getRawLanguages(

 /** Inputs required to initialize a configuration. */
 export interface InitConfigInputs {
+analysisKindsInput: string;
 languagesInput: string | undefined;
 queriesInput: string | undefined;
+qualityQueriesInput: string | undefined;
 packsInput: string | undefined;
 configFile: string | undefined;
 dbLocation: string | undefined;
@@ -392,7 +396,6 @@ export interface InitConfigInputs {
 apiDetails: api.GitHubApiCombinedDetails;
 features: FeatureEnablement;
 repositoryProperties: RepositoryProperties;
-analysisKinds: AnalysisKind[];
 logger: Logger;
 }

@@ -402,8 +405,10 @@ export interface InitConfigInputs {
 */
 export async function initActionState(
 {
+analysisKindsInput,
 languagesInput,
 queriesInput,
+qualityQueriesInput,
 packsInput,
 buildModeInput,
 dbLocation,
@@ -419,16 +424,28 @@ export async function initActionState(
 githubVersion,
 features,
 repositoryProperties,
-analysisKinds,
 logger,
 }: InitConfigInputs,
 userConfig: UserConfig,
 ): Promise<Config> {
+const analysisKinds = await parseAnalysisKinds(analysisKindsInput);

+// For backwards compatibility, add Code Quality to the enabled analysis kinds
+// if an input to `quality-queries` was specified. We should remove this once
+// `quality-queries` is no longer used.
+if (
+!analysisKinds.includes(AnalysisKind.CodeQuality) &&
+qualityQueriesInput !== undefined
+) {
+analysisKinds.push(AnalysisKind.CodeQuality);
+}

 const languages = await getLanguages(
 codeql,
 languagesInput,
 repository,
 sourceRoot,
+features,
 logger,
 );

@@ -523,12 +540,10 @@ async function downloadCacheWithTime(
 }

 async function loadUserConfig(
-logger: Logger,
 configFile: string,
 workspacePath: string,
 apiDetails: api.GitHubApiCombinedDetails,
 tempDir: string,
-validateConfig: boolean,
 ): Promise<UserConfig> {
 if (isLocal(configFile)) {
 if (configFile !== userConfigFromActionPath(tempDir)) {
@@ -541,14 +556,9 @@ async function loadUserConfig(
 );
 }
 }
-return getLocalConfig(logger, configFile, validateConfig);
+return getLocalConfig(configFile);
 } else {
-return await getRemoteConfig(
+return await getRemoteConfig(configFile, apiDetails);
-logger,
-configFile,
-apiDetails,
-validateConfig,
-);
 }
 }

@@ -784,10 +794,7 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
 * This will parse the config from the user input if present, or generate
 * a default config. The parsed config is then stored to a known location.
 */
-export async function initConfig(
+export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
-features: FeatureEnablement,
-inputs: InitConfigInputs,
-): Promise<Config> {
 const { logger, tempDir } = inputs;

 // if configInput is set, it takes precedence over configFile
@@ -807,14 +814,11 @@ export async function initConfig(
 logger.debug("No configuration file was provided");
 } else {
 logger.debug(`Using configuration file: ${inputs.configFile}`);
-const validateConfig = await features.getValue(Feature.ValidateDbConfig);
 userConfig = await loadUserConfig(
-logger,
 inputs.configFile,
 inputs.workspacePath,
 inputs.apiDetails,
 tempDir,
-validateConfig,
 );
 }

@@ -908,11 +912,7 @@ function isLocal(configPath: string): boolean {
 return configPath.indexOf("@") === -1;
 }

-function getLocalConfig(
+function getLocalConfig(configFile: string): UserConfig {
-logger: Logger,
-configFile: string,
-validateConfig: boolean,
-): UserConfig {
 // Error if the file does not exist
 if (!fs.existsSync(configFile)) {
 throw new ConfigurationError(
@@ -920,19 +920,12 @@ function getLocalConfig(
 );
 }

-return parseUserConfig(
+return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
-logger,
-configFile,
-fs.readFileSync(configFile, "utf-8"),
-validateConfig,
-);
 }

 async function getRemoteConfig(
-logger: Logger,
 configFile: string,
 apiDetails: api.GitHubApiCombinedDetails,
-validateConfig: boolean,
 ): Promise<UserConfig> {
 // retrieve the various parts of the config location, and ensure they're present
 const format = new RegExp(
@@ -940,7 +933,7 @@ async function getRemoteConfig(
 );
 const pieces = format.exec(configFile);
 // 5 = 4 groups + the whole expression
-if (pieces?.groups === undefined || pieces.length < 5) {
+if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
 throw new ConfigurationError(
 errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
 );
@@ -968,12 +961,9 @@ async function getRemoteConfig(
 );
 }

-return parseUserConfig(
+return yaml.load(
-logger,
-configFile,
 Buffer.from(fileContents, "base64").toString("binary"),
-validateConfig,
+) as UserConfig;
-);
 }

 /**
@@ -1033,6 +1023,7 @@ export async function getConfig(
 * pack.
 *
 * @param registriesInput The value of the `registries` input.
+* @param codeQL a codeQL object, used only for checking the version of CodeQL.
 * @param tempDir a temporary directory to store the generated qlconfig.yml file.
 * @param logger a logger object.
 * @returns The path to the generated `qlconfig.yml` file and the auth tokens to
@@ -2,13 +2,7 @@ import test, { ExecutionContext } from "ava";

 import { RepositoryProperties } from "../feature-flags/properties";
 import { KnownLanguage, Language } from "../languages";
-import { getRunnerLogger } from "../logging";
+import { prettyPrintPack } from "../util";
-import {
-checkExpectedLogMessages,
-getRecordingLogger,
-LoggedMessage,
-} from "../testing-utils";
-import { ConfigurationError, prettyPrintPack } from "../util";

 import * as dbConfig from "./db-config";

@@ -397,111 +391,3 @@ test(
 {},
 /"a-pack-without-a-scope" is not a valid pack/,
 );

-test("parseUserConfig - successfully parses valid YAML", (t) => {
-const result = dbConfig.parseUserConfig(
-getRunnerLogger(true),
-"test",
-`
-paths-ignore:
-- "some/path"
-queries:
-- uses: foo
-some-unknown-option: true
-`,
-true,
-);
-t.truthy(result);
-if (t.truthy(result["paths-ignore"])) {
-t.is(result["paths-ignore"].length, 1);
-t.is(result["paths-ignore"][0], "some/path");
-}
-if (t.truthy(result["queries"])) {
-t.is(result["queries"].length, 1);
-t.deepEqual(result["queries"][0], { uses: "foo" });
-}
-});

-test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
-t.throws(
-() =>
-dbConfig.parseUserConfig(
-getRunnerLogger(true),
-"test",
-`
-paths-ignore:
-- "some/path"
-queries:
-- foo
-`,
-true,
-),
-{
-instanceOf: ConfigurationError,
-},
-);
-});

-test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
-const loggedMessages: LoggedMessage[] = [];
-const logger = getRecordingLogger(loggedMessages);

-t.notThrows(() =>
-dbConfig.parseUserConfig(
-logger,
-"test",
-`
-query-filters:
-- something
-- include: foo
-- exclude: bar
-`,
-true,
-),
-);
-});

-test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
-const loggedMessages: LoggedMessage[] = [];
-const logger = getRecordingLogger(loggedMessages);

-t.throws(
-() =>
-dbConfig.parseUserConfig(
-logger,
-"test",
-`
-paths-ignore:
-- "some/path"
-queries: true
-`,
-true,
-),
-{
-instanceOf: ConfigurationError,
-message:
-'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
-},
-);

-const expectedMessages = ["instance.queries is not of a type(s) array"];
-checkExpectedLogMessages(t, loggedMessages, expectedMessages);
-});

-test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
-const loggedMessages: LoggedMessage[] = [];
-const logger = getRecordingLogger(loggedMessages);

-t.notThrows(() =>
-dbConfig.parseUserConfig(
-logger,
-"test",
-`
-paths-ignore:
-- "some/path"
-queries: true
-`,
-false,
-),
-);
-});
@@ -1,7 +1,5 @@
 import * as path from "path";

-import * as yaml from "js-yaml";
-import * as jsonschema from "jsonschema";
 import * as semver from "semver";

 import * as errorMessages from "../error-messages";
@@ -160,6 +158,7 @@ const PACK_IDENTIFIER_PATTERN = (function () {
  * Version and path are optional.
  *
  * @param packStr the package specification to verify.
+ * @param configFile Config file to use for error reporting
  */
 export function parsePacksSpecification(packStr: string): Pack {
   if (typeof packStr !== "string") {
@@ -379,7 +378,10 @@ function combineQueries(
   const result: QuerySpec[] = [];

   // Query settings obtained from the repository properties have the highest precedence.
-  if (augmentationProperties.repoPropertyQueries?.input) {
+  if (
+    augmentationProperties.repoPropertyQueries &&
+    augmentationProperties.repoPropertyQueries.input
+  ) {
     logger.info(
       `Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
         `${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,
@@ -472,53 +474,3 @@ export function generateCodeScanningConfig(

   return augmentedConfig;
 }
-
-/**
- * Attempts to parse `contents` into a `UserConfig` value.
- *
- * @param logger The logger to use.
- * @param pathInput The path to the file where `contents` was obtained from, for use in error messages.
- * @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
- * @param validateConfig Whether to validate the configuration file against the schema.
- * @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
- * @throws A `ConfigurationError` if parsing failed.
- */
-export function parseUserConfig(
-  logger: Logger,
-  pathInput: string,
-  contents: string,
-  validateConfig: boolean,
-): UserConfig {
-  try {
-    const schema =
-      // eslint-disable-next-line @typescript-eslint/no-require-imports
-      require("../../src/db-config-schema.json") as jsonschema.Schema;
-
-    const doc = yaml.load(contents);
-
-    if (validateConfig) {
-      const result = new jsonschema.Validator().validate(doc, schema);
-
-      if (result.errors.length > 0) {
-        for (const error of result.errors) {
-          logger.error(error.stack);
-        }
-        throw new ConfigurationError(
-          errorMessages.getInvalidConfigFileMessage(
-            pathInput,
-            result.errors.map((e) => e.stack),
-          ),
-        );
-      }
-    }
-
-    return doc as UserConfig;
-  } catch (error) {
-    if (error instanceof yaml.YAMLException) {
-      throw new ConfigurationError(
-        errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
-      );
-    }
-    throw error;
-  }
-}

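For context only, here is a minimal usage sketch (not part of the diff above) of how the removed parseUserConfig would be called, mirroring the deleted tests. The file name and YAML contents are made up for illustration, and the relative import paths assume the same directory layout as the test file.

import { getRunnerLogger } from "../logging";
import { ConfigurationError } from "../util";

import * as dbConfig from "./db-config";

try {
  // Parse and, because the final argument is true, validate the config against
  // db-config-schema.json. A ConfigurationError is thrown for invalid YAML or
  // for schema violations.
  const userConfig = dbConfig.parseUserConfig(
    getRunnerLogger(true),
    "codeql-config.yml", // hypothetical path, used only in error messages
    `
paths-ignore:
  - "some/path"
queries:
  - uses: foo
`,
    true,
  );
  console.log(userConfig["paths-ignore"]); // ["some/path"]
} catch (e) {
  if (e instanceof ConfigurationError) {
    console.log(`Invalid configuration: ${e.message}`);
  }
}
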
@@ -5,7 +5,6 @@ import test from "ava";
 import * as sinon from "sinon";

 import * as actionsUtil from "./actions-util";
-import { AnalysisKind } from "./analyses";
 import { GitHubApiDetails } from "./api-client";
 import * as apiClient from "./api-client";
 import { createStubCodeQL } from "./codeql";
@@ -109,39 +108,6 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
   });
 });

-test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
-  await withTmpDir(async (tmpDir) => {
-    setupActionsVars(tmpDir, tmpDir);
-    sinon
-      .stub(actionsUtil, "getRequiredInput")
-      .withArgs("upload-database")
-      .returns("true");
-    sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
-
-    await mockHttpRequests(201);
-
-    const loggedMessages = [];
-    await uploadDatabases(
-      testRepoName,
-      getCodeQL(),
-      {
-        ...getTestConfig(tmpDir),
-        analysisKinds: [AnalysisKind.CodeQuality],
-      },
-      testApiDetails,
-      getRecordingLogger(loggedMessages),
-    );
-    t.assert(
-      loggedMessages.find(
-        (v: LoggedMessage) =>
-          v.type === "debug" &&
-          v.message ===
-            "Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
-      ) !== undefined,
-    );
-  });
-});
-
 test("Abort database upload if running against GHES", async (t) => {
   await withTmpDir(async (tmpDir) => {
     setupActionsVars(tmpDir, tmpDir);

@@ -1,7 +1,6 @@
 import * as fs from "fs";

 import * as actionsUtil from "./actions-util";
-import { AnalysisKind } from "./analyses";
 import { getApiClient, GitHubApiDetails } from "./api-client";
 import { type CodeQL } from "./codeql";
 import { Config } from "./config-utils";
@@ -23,13 +22,6 @@ export async function uploadDatabases(
     return;
   }

-  if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
-    logger.debug(
-      `Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
-    );
-    return;
-  }
-
   if (util.isInTestMode()) {
     logger.debug("In test mode. Skipping database upload.");
     return;

@@ -1,145 +0,0 @@
-{
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "title": "CodeQL Database Configuration",
-  "description": "Format of the config file supplied by the user for CodeQL analysis",
-  "type": "object",
-  "properties": {
-    "name": {
-      "type": "string",
-      "description": "Name of the configuration"
-    },
-    "disable-default-queries": {
-      "type": "boolean",
-      "description": "Whether to disable default queries"
-    },
-    "queries": {
-      "type": "array",
-      "description": "List of additional queries to run",
-      "items": {
-        "$ref": "#/definitions/QuerySpec"
-      }
-    },
-    "paths-ignore": {
-      "type": "array",
-      "description": "Paths to ignore during analysis",
-      "items": {
-        "type": "string"
-      }
-    },
-    "paths": {
-      "type": "array",
-      "description": "Paths to include in analysis",
-      "items": {
-        "type": "string"
-      }
-    },
-    "packs": {
-      "description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
-      "oneOf": [
-        {
-          "type": "array",
-          "items": {
-            "type": "string"
-          }
-        },
-        {
-          "type": "object",
-          "additionalProperties": {
-            "type": "array",
-            "items": {
-              "type": "string"
-            }
-          }
-        }
-      ]
-    },
-    "query-filters": {
-      "type": "array",
-      "description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
-      "items": {
-        "$ref": "#/definitions/QueryFilter"
-      }
-    }
-  },
-  "additionalProperties": true,
-  "definitions": {
-    "QuerySpec": {
-      "type": "object",
-      "description": "Detailed query specification object",
-      "properties": {
-        "name": {
-          "type": "string",
-          "description": "Optional name for the query"
-        },
-        "uses": {
-          "type": "string",
-          "description": "The query or query suite to use"
-        }
-      },
-      "required": ["uses"],
-      "additionalProperties": false
-    },
-    "QueryFilter": {
-      "description": "Query filter that can either include or exclude queries",
-      "oneOf": [
-        {
-          "$ref": "#/definitions/ExcludeQueryFilter"
-        },
-        {
-          "$ref": "#/definitions/IncludeQueryFilter"
-        },
-        {}
-      ]
-    },
-    "ExcludeQueryFilter": {
-      "type": "object",
-      "description": "Filter to exclude queries",
-      "properties": {
-        "exclude": {
-          "type": "object",
-          "description": "Queries to exclude",
-          "additionalProperties": {
-            "oneOf": [
-              {
-                "type": "array",
-                "items": {
-                  "type": "string"
-                }
-              },
-              {
-                "type": "string"
-              }
-            ]
-          }
-        }
-      },
-      "required": ["exclude"],
-      "additionalProperties": false
-    },
-    "IncludeQueryFilter": {
-      "type": "object",
-      "description": "Filter to include queries",
-      "properties": {
-        "include": {
-          "type": "object",
-          "description": "Queries to include",
-          "additionalProperties": {
-            "oneOf": [
-              {
-                "type": "array",
-                "items": {
-                  "type": "string"
-                }
-              },
-              {
-                "type": "string"
-              }
-            ]
-          }
-        }
-      },
-      "required": ["include"],
-      "additionalProperties": false
-    }
-  }
-}
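As a side note (not part of the diff): a small sketch of how the schema above was applied by the removed parseUserConfig, using js-yaml and the jsonschema package; the inline schema here is a hand-trimmed subset covering only the "queries" property.

import * as yaml from "js-yaml";
import * as jsonschema from "jsonschema";

// Trimmed-down stand-in for db-config-schema.json: only "queries" is checked.
const schema: jsonschema.Schema = {
  type: "object",
  properties: {
    queries: { type: "array" },
  },
  additionalProperties: true,
};

// "queries: true" violates the schema, matching the deleted test case above.
const doc = yaml.load("queries: true");
const result = new jsonschema.Validator().validate(doc, schema);

for (const error of result.errors) {
  console.log(error.stack); // e.g. "instance.queries is not of a type(s) array"
}
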
Some files were not shown because too many files have changed in this diff.