Mirror of https://github.com/github/codeql-action.git
Synced 2025-12-15 20:09:17 +08:00

Compare commits — 1 commit
Author: henrymerce · SHA1: 82193d6143
11  .github/codeql/codeql-config-javascript.yml  (vendored)

@@ -1,15 +1,6 @@
 name: "CodeQL config"
 queries:
-  - name: Run custom queries
-    uses: ./queries
-  # Run all extra query suites, both because we want to
-  # and because it'll act as extra testing. This is why
-  # we include both even though one is a superset of the
-  # other, because we're testing the parsing logic and
-  # that the suites exist in the codeql bundle.
-  - uses: security-and-quality
-  - uses: security-experimental
-  - uses: security-extended
+  - ./queries
 paths-ignore:
   - lib
   - tests
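For readability, the right-hand side of this hunk assembles into the following complete file (indentation inferred from the YAML structure, since the mirror view drops leading whitespace):

name: "CodeQL config"
queries:
  - ./queries
paths-ignore:
  - lib
  - tests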
55  .github/sizeup.yml  (vendored)

@@ -1,55 +0,0 @@
-labeling:
-  applyCategoryLabels: true
-  categoryLabelPrefix: "size/"
-
-commenting:
-  addCommentWhenScoreThresholdHasBeenExceeded: false
-
-sizeup:
-  categories:
-    - name: extra small
-      lte: 25
-      label:
-        name: XS
-        description: Should be very easy to review
-        color: 3cbf00
-    - name: small
-      lte: 100
-      label:
-        name: S
-        description: Should be easy to review
-        color: 5d9801
-    - name: medium
-      lte: 250
-      label:
-        name: M
-        description: Should be of average difficulty to review
-        color: 7f7203
-    - name: large
-      lte: 500
-      label:
-        name: L
-        description: May be hard to review
-        color: a14c05
-    - name: extra large
-      lte: 1000
-      label:
-        name: XL
-        description: May be very hard to review
-        color: c32607
-    - name: extra extra large
-      label:
-        name: XXL
-        description: May be extremely hard to review
-        color: e50009
-  ignoredFilePatterns:
-    - ".github/workflows/__*"
-    - "lib/**/*"
-    - "package-lock.json"
-  testFilePatterns:
-    - "**/*.test.ts"
-  scoring:
-    # This formula and the aliases below it are written in prefix notation.
-    # For an explanation of how this works, please see:
-    # https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
-    formula: "- - + additions deletions comments whitespace"
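The deleted scoring formula is written in prefix (Polish) notation, where each operator precedes its operands. Reading it inside-out gives the infix equivalent shown in the comments below — my own unpacking of the notation documented in the linked sizeup-core README, not something spelled out in this diff:

scoring:
  # Prefix notation: each operator applies to the two operands that follow it.
  #   - - + additions deletions comments whitespace
  #   =  ((additions + deletions) - comments) - whitespace
  # i.e. a PR's size score is its added plus deleted lines, discounted by
  # comment-only and whitespace-only changes.
  formula: "- - + additions deletions comments whitespace"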
15  .github/workflows/__analyze-ref-input.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
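One detail worth noting in the hunk above: the `${{ inputs.go-version || '>=1.21.0' }}` expression survives even though the `python-version` input (and its declared default) is being removed. In GitHub Actions expressions, `||` returns its right operand when the left is empty, so an inline fallback keeps a step working for callers that pass no input at all. A minimal sketch — the `actions/setup-go` step shown here is assumed from the surrounding context rather than quoted from the hunk:

      - name: Install Go
        uses: actions/setup-go@v5   # assumed step/version, for illustration only
        with:
          # when the caller passes no go-version input, the expression
          # falls back to the hard-coded constraint
          go-version: ${{ inputs.go-version || '>=1.21.0' }}
          cache: false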
5  .github/workflows/__bundle-from-toolcache.yml  (generated, vendored)

@@ -67,9 +67,10 @@ jobs:
             if (allCodeqlVersions.length === 0) {
               throw new Error(`CodeQL could not be found in the toolcache`);
             }
-      - id: setup-codeql
-        uses: ./../action/setup-codeql
+      - id: init
+        uses: ./../action/init
         with:
+          languages: javascript
           tools: ${{ steps.prepare-test.outputs.tools-url }}
       - name: Check CodeQL is installed within the toolcache
         uses: actions/github-script@v8
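Per the CHANGELOG entry that appears later in this compare, `setup-codeql` is "[s]imilar to `init`, except it only installs the CodeQL CLI and does not initialize a database". That explains the extra `languages` input on the right-hand side of the hunk above: `init` also creates a CodeQL database, so it has to be told which language(s) to initialize. A sketch of the two shapes, assembled from the hunk:

      # left-hand side: CLI install only, no database created
      - id: setup-codeql
        uses: ./../action/setup-codeql
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}

      # right-hand side: CLI install plus database initialization
      - id: init
        uses: ./../action/init
        with:
          languages: javascript
          tools: ${{ steps.prepare-test.outputs.tools-url }}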
2  .github/workflows/__config-input.yml  (generated, vendored)

@@ -49,7 +49,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm
15  .github/workflows/__local-bundle.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Fetch latest CodeQL bundle
         run: |
           wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst
55  .github/workflows/__multi-language-autodetect.yml  (generated, vendored)

@@ -9,6 +9,9 @@ env:
   GO111MODULE: auto
 on:
   push:
+    branches:
+      - main
+      - releases/v*
   pull_request:
     types:
       - opened
@@ -24,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -36,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -53,8 +46,42 @@ jobs:
       fail-fast: false
       matrix:
         include:
+          - os: macos-latest
+            version: stable-v2.17.6
+          - os: ubuntu-latest
+            version: stable-v2.17.6
+          - os: macos-latest
+            version: stable-v2.18.4
+          - os: ubuntu-latest
+            version: stable-v2.18.4
+          - os: macos-latest
+            version: stable-v2.19.4
+          - os: ubuntu-latest
+            version: stable-v2.19.4
+          - os: macos-latest
+            version: stable-v2.20.7
+          - os: ubuntu-latest
+            version: stable-v2.20.7
+          - os: macos-latest
+            version: stable-v2.21.4
+          - os: ubuntu-latest
+            version: stable-v2.21.4
+          - os: macos-latest
+            version: stable-v2.22.4
+          - os: ubuntu-latest
+            version: stable-v2.22.4
+          - os: macos-latest
+            version: default
+          - os: ubuntu-latest
+            version: default
           - os: macos-latest
             version: linked
+          - os: ubuntu-latest
+            version: linked
+          - os: macos-latest
+            version: nightly-latest
+          - os: ubuntu-latest
+            version: nightly-latest
     name: Multi-language repository
     if: github.triggering_actor != 'dependabot[bot]'
     permissions:
@@ -77,11 +104,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Use Xcode 16
         if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
         run: sudo xcode-select -s "/Applications/Xcode_16.app"
@@ -148,3 +170,6 @@ jobs:
           echo "Did not create a database for Swift, or created it in the wrong location."
           exit 1
           fi
+        env:
+          CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
+          CODEQL_ACTION_TEST_MODE: true
17  .github/workflows/__packaging-codescanning-config-inputs-js.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -73,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm
@@ -91,11 +81,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           config-file: .github/codeql/codeql-config-packaging3.yml
2  .github/workflows/__packaging-config-inputs-js.yml  (generated, vendored)

@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm
2  .github/workflows/__packaging-config-js.yml  (generated, vendored)

@@ -63,7 +63,7 @@ jobs:
       - name: Check out repository
         uses: actions/checkout@v5
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 20.x
           cache: npm
2  .github/workflows/__packaging-inputs-js.yml  (generated, vendored)

@@ -63,7 +63,7 @@ jobs:
      - name: Check out repository
        uses: actions/checkout@v5
      - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
        with:
          node-version: 20.x
          cache: npm
9  .github/workflows/__quality-queries.yml  (generated, vendored)

@@ -80,7 +80,6 @@ jobs:
         with:
           output: ${{ runner.temp }}/results
           upload-database: false
-          post-processed-sarif-path: ${{ runner.temp }}/post-processed
       - name: Upload security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/upload-artifact@v4
@@ -97,14 +96,6 @@ jobs:
             quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
           path: ${{ runner.temp }}/results/javascript.quality.sarif
           retention-days: 7
-      - name: Upload post-processed SARIF
-        uses: actions/upload-artifact@v4
-        with:
-          name: |
-            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-          path: ${{ runner.temp }}/post-processed
-          retention-days: 7
-          if-no-files-found: error
       - name: Check quality query does not appear in security SARIF
         if: contains(matrix.analysis-kinds, 'code-scanning')
         uses: actions/github-script@v8
15  .github/workflows/__remote-config.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -82,11 +72,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
15  .github/workflows/__unset-environment.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -82,11 +72,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         id: init
         with:
15  .github/workflows/__upload-ref-sha-input.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
15  .github/workflows/__upload-sarif.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -87,11 +77,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - uses: ./../action/init
         with:
           tools: ${{ steps.prepare-test.outputs.tools-url }}
15  .github/workflows/__with-checkout-path.yml  (generated, vendored)

@@ -27,11 +27,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
   workflow_call:
     inputs:
       go-version:
@@ -39,11 +34,6 @@ on:
       description: The version of Go to install
       required: false
       default: '>=1.21.0'
-    python-version:
-      type: string
-      description: The version of Python to install
-      required: false
-      default: '3.13'
 defaults:
   run:
     shell: bash
@@ -80,11 +70,6 @@ jobs:
         with:
           go-version: ${{ inputs.go-version || '>=1.21.0' }}
           cache: false
-      - name: Install Python
-        if: matrix.version != 'nightly-latest'
-        uses: actions/setup-python@v6
-        with:
-          python-version: ${{ inputs.python-version || '3.13' }}
       - name: Delete original checkout
         run: |
           # delete the original checkout so we don't accidentally use it.
1  .github/workflows/codeql.yml  (vendored)

@@ -2,7 +2,6 @@ name: "CodeQL action"
 
 on:
   push:
-    branches: [main, releases/v*]
   pull_request:
     branches: [main, releases/v*]
     # Run checks on reopened draft PRs to support triggering PR checks on draft PRs that were opened

@@ -56,7 +56,7 @@ jobs:
         uses: actions/checkout@v5
 
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: 'npm'
26  .github/workflows/label-pr-size.yml  (vendored)

@@ -1,26 +0,0 @@
-name: Label PR with size
-
-on:
-  pull_request:
-    types:
-      - opened
-      - synchronize
-      - reopened
-      - edited
-      - ready_for_review
-
-permissions:
-  contents: read
-  pull-requests: write
-
-jobs:
-  sizeup:
-    name: Label PR with size
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Run sizeup
-        uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
-        with:
-          token: "${{ secrets.GITHUB_TOKEN }}"
-          configuration-file-path: ".github/sizeup.yml"
2  .github/workflows/post-release-mergeback.yml  (vendored)

@@ -47,7 +47,7 @@ jobs:
       - uses: actions/checkout@v5
         with:
           fetch-depth: 0 # ensure we have all tags and can push commits
-      - uses: actions/setup-node@v6
+      - uses: actions/setup-node@v5
 
       - name: Update git config
         run: |
2  .github/workflows/pr-checks.yml  (vendored)

@@ -35,7 +35,7 @@ jobs:
       - uses: actions/checkout@v5
 
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: ${{ matrix.node-version }}
           cache: 'npm'
2  .github/workflows/query-filters.yml  (vendored)

@@ -32,7 +32,7 @@ jobs:
         uses: actions/checkout@v5
 
       - name: Install Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: npm
2  .github/workflows/update-bundle.yml  (vendored)

@@ -41,7 +41,7 @@ jobs:
           git config --global user.name "github-actions[bot]"
 
       - name: Set up Node.js
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v5
         with:
           node-version: 24
           cache: 'npm'
10  CHANGELOG.md

@@ -6,16 +6,6 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
 
 No user facing changes.
-
-## 4.31.0 - 24 Oct 2025
-
-- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
-- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
-
-## 4.30.9 - 17 Oct 2025
-
-- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
-- Experimental: A new `setup-codeql` action has been added which is similar to `init`, except it only installs the CodeQL CLI and does not initialize a database. Do not use this in production as it is part of an internal experiment and subject to change at any time. [#3204](https://github.com/github/codeql-action/pull/3204)
 
 ## 4.30.8 - 10 Oct 2025
 
 No user facing changes.
README.md

@@ -34,7 +34,6 @@ Actions with special purposes and unlikely to be used directly:
 - `autobuild`: Attempts to automatically build the code. Only used for analyzing languages that require a build. Use the `build-mode: autobuild` input in the `init` action instead. For information about input parameters, see the [autobuild action definition](https://github.com/github/codeql-action/blob/main/autobuild/action.yml).
 - `resolve-environment`: [Experimental] Attempts to infer a build environment suitable for automatic builds. For information about input parameters, see the [resolve-environment action definition](https://github.com/github/codeql-action/blob/main/resolve-environment/action.yml).
 - `start-proxy`: [Experimental] Start the HTTP proxy server. Internal use only and will change without notice. For information about input parameters, see the [start-proxy action definition](https://github.com/github/codeql-action/blob/main/start-proxy/action.yml).
-- `setup-codeql`: [Experimental] Similar to `init`, except it only installs the CodeQL CLI and does not initialize a database.
 
 ### Workflow Permissions
 
analyze/action.yml

@@ -6,7 +6,7 @@ inputs:
     description: The name of the check run to add text to.
     required: false
   output:
-    description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
+    description: The path of the directory in which to save the SARIF results
     required: false
     default: "../results"
   upload:
@@ -70,12 +70,6 @@ inputs:
     description: Whether to upload the resulting CodeQL database
     required: false
     default: "true"
-  post-processed-sarif-path:
-    description: >-
-      Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
-      on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
-      argument for this input, they are written to the specified directory.
-    required: false
   wait-for-processing:
     description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
     required: true
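The removed `post-processed-sarif-path` input pairs with the `Upload post-processed SARIF` step removed from `__quality-queries.yml` earlier in this compare. A minimal sketch of the (left-hand-side) pattern those two hunks describe — the `./../action/analyze` step reference is assumed from context, since the hunks only show the `with:` blocks:

      - uses: ./../action/analyze   # assumed; the hunks show only the inputs
        with:
          output: ${{ runner.temp }}/results
          upload-database: false
          post-processed-sarif-path: ${{ runner.temp }}/post-processed
      - name: Upload post-processed SARIF
        uses: actions/upload-artifact@v4
        with:
          path: ${{ runner.temp }}/post-processed
          retention-days: 7
          if-no-files-found: error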
eslint.config.mjs

@@ -131,7 +131,6 @@ export default [
       "no-sequences": "error",
       "no-shadow": "off",
       "@typescript-eslint/no-shadow": "error",
-      "@typescript-eslint/prefer-optional-chain": "error",
       "one-var": ["error", "never"],
     },
   },
1364  lib/analyze-action-post.js  (generated) — file diff suppressed because it is too large
3699  lib/analyze-action.js  (generated) — file diff suppressed because it is too large
1639  lib/autobuild-action.js  (generated) — file diff suppressed because it is too large
lib/defaults.json

@@ -1,6 +1,6 @@
 {
-  "bundleVersion": "codeql-bundle-v2.23.3",
-  "cliVersion": "2.23.3",
-  "priorBundleVersion": "codeql-bundle-v2.23.2",
-  "priorCliVersion": "2.23.2"
+  "bundleVersion": "codeql-bundle-v2.23.2",
+  "cliVersion": "2.23.2",
+  "priorBundleVersion": "codeql-bundle-v2.23.1",
+  "priorCliVersion": "2.23.1"
 }
3349  lib/init-action-post.js  (generated) — file diff suppressed because it is too large
2224  lib/init-action.js  (generated) — file diff suppressed because it is too large
1628  lib/resolve-environment-action.js  (generated) — file diff suppressed because it is too large
88821  lib/setup-codeql-action.js  (generated) — file diff suppressed because one or more lines are too long
1358  lib/start-proxy-action-post.js  (generated) — file diff suppressed because it is too large
1623  lib/start-proxy-action.js  (generated) — file diff suppressed because it is too large
352
lib/upload-lib.js
generated
352
lib/upload-lib.js
generated
@@ -20885,19 +20885,19 @@ var require_validator = __commonJS({
|
|||||||
var SchemaError = helpers.SchemaError;
|
var SchemaError = helpers.SchemaError;
|
||||||
var SchemaContext = helpers.SchemaContext;
|
var SchemaContext = helpers.SchemaContext;
|
||||||
var anonymousBase = "/";
|
var anonymousBase = "/";
|
||||||
var Validator3 = function Validator4() {
|
var Validator2 = function Validator3() {
|
||||||
this.customFormats = Object.create(Validator4.prototype.customFormats);
|
this.customFormats = Object.create(Validator3.prototype.customFormats);
|
||||||
this.schemas = {};
|
this.schemas = {};
|
||||||
this.unresolvedRefs = [];
|
this.unresolvedRefs = [];
|
||||||
this.types = Object.create(types);
|
this.types = Object.create(types);
|
||||||
this.attributes = Object.create(attribute.validators);
|
this.attributes = Object.create(attribute.validators);
|
||||||
};
|
};
|
||||||
Validator3.prototype.customFormats = {};
|
Validator2.prototype.customFormats = {};
|
||||||
Validator3.prototype.schemas = null;
|
Validator2.prototype.schemas = null;
|
||||||
Validator3.prototype.types = null;
|
Validator2.prototype.types = null;
|
||||||
Validator3.prototype.attributes = null;
|
Validator2.prototype.attributes = null;
|
||||||
Validator3.prototype.unresolvedRefs = null;
|
Validator2.prototype.unresolvedRefs = null;
|
||||||
Validator3.prototype.addSchema = function addSchema(schema2, base) {
|
Validator2.prototype.addSchema = function addSchema(schema2, base) {
|
||||||
var self2 = this;
|
var self2 = this;
|
||||||
if (!schema2) {
|
if (!schema2) {
|
||||||
return null;
|
return null;
|
||||||
@@ -20915,25 +20915,25 @@ var require_validator = __commonJS({
|
|||||||
});
|
});
|
||||||
return this.schemas[ourUri];
|
return this.schemas[ourUri];
|
||||||
};
|
};
|
||||||
Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
|
Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
|
||||||
if (!Array.isArray(schemas)) return;
|
if (!Array.isArray(schemas)) return;
|
||||||
for (var i = 0; i < schemas.length; i++) {
|
for (var i = 0; i < schemas.length; i++) {
|
||||||
this.addSubSchema(baseuri, schemas[i]);
|
this.addSubSchema(baseuri, schemas[i]);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
|
Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
|
||||||
if (!schemas || typeof schemas != "object") return;
|
if (!schemas || typeof schemas != "object") return;
|
||||||
for (var p in schemas) {
|
for (var p in schemas) {
|
||||||
this.addSubSchema(baseuri, schemas[p]);
|
this.addSubSchema(baseuri, schemas[p]);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
Validator3.prototype.setSchemas = function setSchemas(schemas) {
|
Validator2.prototype.setSchemas = function setSchemas(schemas) {
|
||||||
this.schemas = schemas;
|
this.schemas = schemas;
|
||||||
};
|
};
|
||||||
Validator3.prototype.getSchema = function getSchema(urn) {
|
Validator2.prototype.getSchema = function getSchema(urn) {
|
||||||
return this.schemas[urn];
|
return this.schemas[urn];
|
||||||
};
|
};
|
||||||
Validator3.prototype.validate = function validate(instance, schema2, options, ctx) {
|
Validator2.prototype.validate = function validate(instance, schema2, options, ctx) {
|
||||||
if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) {
|
if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) {
|
||||||
throw new SchemaError("Expected `schema` to be an object or boolean");
|
throw new SchemaError("Expected `schema` to be an object or boolean");
|
||||||
}
|
}
|
||||||
@@ -20971,7 +20971,7 @@ var require_validator = __commonJS({
|
|||||||
if (typeof ref == "string") return ref;
|
if (typeof ref == "string") return ref;
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
|
Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
|
||||||
var result = new ValidatorResult(instance, schema2, options, ctx);
|
var result = new ValidatorResult(instance, schema2, options, ctx);
|
||||||
if (typeof schema2 === "boolean") {
|
if (typeof schema2 === "boolean") {
|
||||||
if (schema2 === true) {
|
if (schema2 === true) {
|
||||||
@@ -21021,17 +21021,17 @@ var require_validator = __commonJS({
|
|||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
|
Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
|
||||||
schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
|
schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
|
||||||
};
|
};
|
||||||
Validator3.prototype.superResolve = function superResolve(schema2, ctx) {
|
Validator2.prototype.superResolve = function superResolve(schema2, ctx) {
|
||||||
var ref = shouldResolve(schema2);
|
var ref = shouldResolve(schema2);
|
||||||
if (ref) {
|
if (ref) {
|
||||||
return this.resolve(schema2, ref, ctx).subschema;
|
return this.resolve(schema2, ref, ctx).subschema;
|
||||||
}
|
}
|
||||||
return schema2;
|
return schema2;
|
||||||
};
|
};
|
||||||
Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
|
Validator2.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
|
||||||
switchSchema = ctx.resolve(switchSchema);
|
switchSchema = ctx.resolve(switchSchema);
|
||||||
if (ctx.schemas[switchSchema]) {
|
if (ctx.schemas[switchSchema]) {
|
||||||
return { subschema: ctx.schemas[switchSchema], switchSchema };
|
return { subschema: ctx.schemas[switchSchema], switchSchema };
|
||||||
@@ -21048,7 +21048,7 @@ var require_validator = __commonJS({
|
|||||||
}
|
}
|
||||||
return { subschema, switchSchema };
|
return { subschema, switchSchema };
|
||||||
};
|
};
|
||||||
Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
|
Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
|
||||||
if (type2 === void 0) {
|
if (type2 === void 0) {
|
||||||
return;
|
return;
|
||||||
} else if (type2 === null) {
|
} else if (type2 === null) {
|
||||||
@@ -21063,7 +21063,7 @@ var require_validator = __commonJS({
|
|||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
};
|
};
|
||||||
var types = Validator3.prototype.types = {};
|
var types = Validator2.prototype.types = {};
|
||||||
types.string = function testString(instance) {
|
types.string = function testString(instance) {
|
||||||
return typeof instance == "string";
|
return typeof instance == "string";
|
||||||
};
|
};
|
||||||
@@ -21091,7 +21091,7 @@ var require_validator = __commonJS({
|
|||||||
types.object = function testObject(instance) {
|
types.object = function testObject(instance) {
|
||||||
return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date);
|
return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date);
|
||||||
};
|
};
|
||||||
module2.exports = Validator3;
|
module2.exports = Validator2;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -21099,7 +21099,7 @@ var require_validator = __commonJS({
|
|||||||
var require_lib2 = __commonJS({
|
var require_lib2 = __commonJS({
|
||||||
"node_modules/jsonschema/lib/index.js"(exports2, module2) {
|
"node_modules/jsonschema/lib/index.js"(exports2, module2) {
|
||||||
"use strict";
|
"use strict";
|
||||||
var Validator3 = module2.exports.Validator = require_validator();
|
var Validator2 = module2.exports.Validator = require_validator();
|
||||||
module2.exports.ValidatorResult = require_helpers().ValidatorResult;
|
module2.exports.ValidatorResult = require_helpers().ValidatorResult;
|
||||||
module2.exports.ValidatorResultError = require_helpers().ValidatorResultError;
|
module2.exports.ValidatorResultError = require_helpers().ValidatorResultError;
|
||||||
module2.exports.ValidationError = require_helpers().ValidationError;
|
module2.exports.ValidationError = require_helpers().ValidationError;
|
||||||
@@ -21107,7 +21107,7 @@ var require_lib2 = __commonJS({
|
|||||||
module2.exports.SchemaScanResult = require_scan().SchemaScanResult;
|
module2.exports.SchemaScanResult = require_scan().SchemaScanResult;
|
||||||
module2.exports.scan = require_scan().scan;
|
module2.exports.scan = require_scan().scan;
|
||||||
module2.exports.validate = function(instance, schema2, options) {
|
module2.exports.validate = function(instance, schema2, options) {
|
||||||
var v = new Validator3();
|
var v = new Validator2();
|
||||||
return v.validate(instance, schema2, options);
|
return v.validate(instance, schema2, options);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -21899,14 +21899,14 @@ var require_dist_node4 = __commonJS({
|
|||||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||||
var dist_src_exports = {};
|
var dist_src_exports = {};
|
||||||
__export2(dist_src_exports, {
|
__export2(dist_src_exports, {
|
||||||
RequestError: () => RequestError
|
RequestError: () => RequestError2
|
||||||
});
|
});
|
||||||
module2.exports = __toCommonJS2(dist_src_exports);
|
module2.exports = __toCommonJS2(dist_src_exports);
|
||||||
var import_deprecation = require_dist_node3();
|
var import_deprecation = require_dist_node3();
|
||||||
var import_once = __toESM2(require_once());
|
var import_once = __toESM2(require_once());
|
||||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||||
var RequestError = class extends Error {
|
var RequestError2 = class extends Error {
|
||||||
constructor(message, statusCode, options) {
|
constructor(message, statusCode, options) {
|
||||||
super(message);
|
super(message);
|
||||||
if (Error.captureStackTrace) {
|
if (Error.captureStackTrace) {
|
||||||
@@ -21998,7 +21998,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||||
}
|
}
|
||||||
var import_request_error = require_dist_node4();
|
var import_request_error2 = require_dist_node4();
|
||||||
function getBufferResponse(response) {
|
function getBufferResponse(response) {
|
||||||
return response.arrayBuffer();
|
return response.arrayBuffer();
|
||||||
}
|
}
|
||||||
@@ -22050,7 +22050,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
if (status < 400) {
|
if (status < 400) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
throw new import_request_error.RequestError(response.statusText, status, {
|
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22061,7 +22061,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (status === 304) {
|
if (status === 304) {
|
||||||
throw new import_request_error.RequestError("Not modified", status, {
|
throw new import_request_error2.RequestError("Not modified", status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22073,7 +22073,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
}
|
}
|
||||||
if (status >= 400) {
|
if (status >= 400) {
|
||||||
const data = await getResponseData(response);
|
const data = await getResponseData(response);
|
||||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22093,7 +22093,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
data
|
data
|
||||||
};
|
};
|
||||||
}).catch((error2) => {
|
}).catch((error2) => {
|
||||||
if (error2 instanceof import_request_error.RequestError)
|
if (error2 instanceof import_request_error2.RequestError)
|
||||||
throw error2;
|
throw error2;
|
||||||
else if (error2.name === "AbortError")
|
else if (error2.name === "AbortError")
|
||||||
throw error2;
|
throw error2;
|
||||||
@@ -22105,7 +22105,7 @@ var require_dist_node5 = __commonJS({
|
|||||||
message = error2.cause;
|
message = error2.cause;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
throw new import_request_error.RequestError(message, 500, {
|
throw new import_request_error2.RequestError(message, 500, {
|
||||||
request: requestOptions
|
request: requestOptions
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -22547,14 +22547,14 @@ var require_dist_node7 = __commonJS({
|
|||||||
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
|
||||||
var dist_src_exports = {};
|
var dist_src_exports = {};
|
||||||
__export2(dist_src_exports, {
|
__export2(dist_src_exports, {
|
||||||
RequestError: () => RequestError
|
RequestError: () => RequestError2
|
||||||
});
|
});
|
||||||
module2.exports = __toCommonJS2(dist_src_exports);
|
module2.exports = __toCommonJS2(dist_src_exports);
|
||||||
var import_deprecation = require_dist_node3();
|
var import_deprecation = require_dist_node3();
|
||||||
var import_once = __toESM2(require_once());
|
var import_once = __toESM2(require_once());
|
||||||
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||||
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
|
||||||
var RequestError = class extends Error {
|
var RequestError2 = class extends Error {
|
||||||
constructor(message, statusCode, options) {
|
constructor(message, statusCode, options) {
|
||||||
super(message);
|
super(message);
|
||||||
if (Error.captureStackTrace) {
|
if (Error.captureStackTrace) {
|
||||||
@@ -22646,7 +22646,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
|
||||||
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
|
||||||
}
|
}
|
||||||
var import_request_error = require_dist_node7();
|
var import_request_error2 = require_dist_node7();
|
||||||
function getBufferResponse(response) {
|
function getBufferResponse(response) {
|
||||||
return response.arrayBuffer();
|
return response.arrayBuffer();
|
||||||
}
|
}
|
||||||
@@ -22698,7 +22698,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
if (status < 400) {
|
if (status < 400) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
throw new import_request_error.RequestError(response.statusText, status, {
|
throw new import_request_error2.RequestError(response.statusText, status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22709,7 +22709,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (status === 304) {
|
if (status === 304) {
|
||||||
throw new import_request_error.RequestError("Not modified", status, {
|
throw new import_request_error2.RequestError("Not modified", status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22721,7 +22721,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
}
|
}
|
||||||
if (status >= 400) {
|
if (status >= 400) {
|
||||||
const data = await getResponseData(response);
|
const data = await getResponseData(response);
|
||||||
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
|
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
|
||||||
response: {
|
response: {
|
||||||
url: url2,
|
url: url2,
|
||||||
status,
|
status,
|
||||||
@@ -22741,7 +22741,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
data
|
data
|
||||||
};
|
};
|
||||||
}).catch((error2) => {
|
}).catch((error2) => {
|
||||||
if (error2 instanceof import_request_error.RequestError)
|
if (error2 instanceof import_request_error2.RequestError)
|
||||||
throw error2;
|
throw error2;
|
||||||
else if (error2.name === "AbortError")
|
else if (error2.name === "AbortError")
|
||||||
throw error2;
|
throw error2;
|
||||||
@@ -22753,7 +22753,7 @@ var require_dist_node8 = __commonJS({
|
|||||||
message = error2.cause;
|
message = error2.cause;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
throw new import_request_error.RequestError(message, 500, {
|
throw new import_request_error2.RequestError(message, 500, {
|
||||||
request: requestOptions
|
request: requestOptions
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -33606,7 +33606,7 @@ var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "codeql",
-version: "4.31.1",
+version: "4.30.9",
 private: true,
 description: "CodeQL action",
 scripts: {
@@ -33644,6 +33644,7 @@ var require_package = __commonJS({
 "@octokit/request-error": "^7.0.1",
 "@schemastore/package": "0.0.10",
 archiver: "^7.0.1",
+"check-disk-space": "^3.4.0",
 "console-log-level": "^1.4.1",
 del: "^8.0.0",
 "fast-deep-equal": "^3.1.3",
@@ -33653,7 +33654,7 @@ var require_package = __commonJS({
 jsonschema: "1.4.1",
 long: "^5.3.2",
 "node-forge": "^1.3.1",
-octokit: "^5.0.4",
+octokit: "^5.0.3",
 semver: "^7.7.3",
 uuid: "^13.0.0"
 },
@@ -33661,7 +33662,7 @@ var require_package = __commonJS({
 "@ava/typescript": "6.0.0",
 "@eslint/compat": "^1.4.0",
 "@eslint/eslintrc": "^3.3.1",
-"@eslint/js": "^9.38.0",
+"@eslint/js": "^9.37.0",
 "@microsoft/eslint-formatter-sarif": "^3.1.0",
 "@octokit/types": "^15.0.0",
 "@types/archiver": "^6.0.3",
@@ -33672,10 +33673,10 @@ var require_package = __commonJS({
 "@types/node-forge": "^1.3.14",
 "@types/semver": "^7.7.1",
 "@types/sinon": "^17.0.4",
-"@typescript-eslint/eslint-plugin": "^8.46.1",
+"@typescript-eslint/eslint-plugin": "^8.46.0",
 "@typescript-eslint/parser": "^8.41.0",
 ava: "^6.4.1",
-esbuild: "^0.25.11",
+esbuild: "^0.25.10",
 eslint: "^8.57.1",
 "eslint-import-resolver-typescript": "^3.8.7",
 "eslint-plugin-filenames": "^1.3.2",
@@ -35064,14 +35065,14 @@ var require_dist_node14 = __commonJS({
 var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
 var dist_src_exports = {};
 __export2(dist_src_exports, {
-RequestError: () => RequestError
+RequestError: () => RequestError2
 });
 module2.exports = __toCommonJS2(dist_src_exports);
 var import_deprecation = require_dist_node3();
 var import_once = __toESM2(require_once());
 var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
 var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
-var RequestError = class extends Error {
+var RequestError2 = class extends Error {
 constructor(message, statusCode, options) {
 super(message);
 if (Error.captureStackTrace) {
@@ -35173,7 +35174,7 @@ var require_dist_node15 = __commonJS({
 throw error2;
 }
 var import_light = __toESM2(require_light());
-var import_request_error = require_dist_node14();
+var import_request_error2 = require_dist_node14();
 async function wrapRequest(state, octokit, request, options) {
 const limiter = new import_light.default();
 limiter.on("failed", function(error2, info4) {
@@ -35194,7 +35195,7 @@ var require_dist_node15 = __commonJS({
 if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
 response.data.errors[0].message
 )) {
-const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
+const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
 request: options,
 response
 });
@@ -80920,14 +80921,14 @@ var require_tool_cache = __commonJS({
 var assert_1 = require("assert");
 var exec_1 = require_exec();
 var retry_helper_1 = require_retry_helper();
-var HTTPError2 = class extends Error {
+var HTTPError = class extends Error {
 constructor(httpStatusCode) {
 super(`Unexpected HTTP response: ${httpStatusCode}`);
 this.httpStatusCode = httpStatusCode;
 Object.setPrototypeOf(this, new.target.prototype);
 }
 };
-exports2.HTTPError = HTTPError2;
+exports2.HTTPError = HTTPError;
 var IS_WINDOWS = process.platform === "win32";
 var IS_MAC = process.platform === "darwin";
 var userAgent = "actions/tool-cache";
@@ -80944,7 +80945,7 @@ var require_tool_cache = __commonJS({
 return yield retryHelper.execute(() => __awaiter4(this, void 0, void 0, function* () {
 return yield downloadToolAttempt(url2, dest || "", auth, headers);
 }), (err) => {
-if (err instanceof HTTPError2 && err.httpStatusCode) {
+if (err instanceof HTTPError && err.httpStatusCode) {
 if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) {
 return false;
 }
@@ -80971,7 +80972,7 @@ var require_tool_cache = __commonJS({
 }
 const response = yield http.get(url2, headers);
 if (response.message.statusCode !== 200) {
-const err = new HTTPError2(response.message.statusCode);
+const err = new HTTPError(response.message.statusCode);
 core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
 throw err;
 }
@@ -84846,7 +84847,6 @@ __export(upload_lib_exports, {
 getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
 getSarifFilePaths: () => getSarifFilePaths,
 populateRunAutomationDetails: () => populateRunAutomationDetails,
-postProcessSarifFiles: () => postProcessSarifFiles,
 readSarifFile: () => readSarifFile,
 shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
 shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
@@ -84854,11 +84854,10 @@ __export(upload_lib_exports, {
 throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled,
 uploadFiles: () => uploadFiles,
 uploadPayload: () => uploadPayload,
-uploadPostProcessedFiles: () => uploadPostProcessedFiles,
+uploadSpecifiedFiles: () => uploadSpecifiedFiles,
 validateSarifFileSchema: () => validateSarifFileSchema,
 validateUniqueCategory: () => validateUniqueCategory,
-waitForProcessing: () => waitForProcessing,
-writePostProcessedFiles: () => writePostProcessedFiles
+waitForProcessing: () => waitForProcessing
 });
 module.exports = __toCommonJS(upload_lib_exports);
 var fs13 = __toESM(require("fs"));
@@ -84866,7 +84865,7 @@ var path14 = __toESM(require("path"));
 var url = __toESM(require("url"));
 var import_zlib = __toESM(require("zlib"));
 var core11 = __toESM(require_core());
-var jsonschema2 = __toESM(require_lib2());
+var jsonschema = __toESM(require_lib2());

 // src/actions-util.ts
 var fs4 = __toESM(require("fs"));
@@ -88329,35 +88328,13 @@ function getRequiredEnvParam(paramName) {
 }
 return value;
 }
-function getOptionalEnvVar(paramName) {
-const value = process.env[paramName];
-if (value?.trim().length === 0) {
-return void 0;
-}
-return value;
-}
-var HTTPError = class extends Error {
-constructor(message, status) {
-super(message);
-this.status = status;
-}
-};
 var ConfigurationError = class extends Error {
 constructor(message) {
 super(message);
 }
 };
-function asHTTPError(arg) {
-if (typeof arg !== "object" || arg === null || typeof arg.message !== "string") {
-return void 0;
-}
-if (Number.isInteger(arg.status)) {
-return new HTTPError(arg.message, arg.status);
-}
-if (Number.isInteger(arg.httpStatusCode)) {
-return new HTTPError(arg.message, arg.httpStatusCode);
-}
-return void 0;
+function isHTTPError(arg) {
+return arg?.status !== void 0 && Number.isInteger(arg.status);
 }
 var cachedCodeQlVersion = void 0;
 function cacheCodeQlVersion(version) {
@@ -88770,24 +88747,14 @@ function computeAutomationID(analysis_key, environment) {
 return automationID;
 }
 function wrapApiConfigurationError(e) {
-const httpError = asHTTPError(e);
-if (httpError !== void 0) {
-if ([
-/API rate limit exceeded/,
-/commit not found/,
-/Resource not accessible by integration/,
-/ref .* not found in this repository/
-].some((pattern) => pattern.test(httpError.message))) {
-return new ConfigurationError(httpError.message);
-}
-if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found")) {
+if (isHTTPError(e)) {
+if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
+return new ConfigurationError(e.message);
+} else if (e.message.includes("Bad credentials") || e.message.includes("Not Found")) {
 return new ConfigurationError(
 "Please check that your token is valid and has the required permissions: contents: read, security-events: write"
 );
 }
-if (httpError.status === 429) {
-return new ConfigurationError("API rate limit exceeded");
-}
 }
 return e;
 }
@@ -88798,6 +88765,45 @@ var path12 = __toESM(require("path"));
 var core10 = __toESM(require_core());
 var toolrunner3 = __toESM(require_toolrunner());

+// node_modules/@octokit/request-error/dist-src/index.js
+var RequestError = class extends Error {
+name;
+/**
+* http status code
+*/
+status;
+/**
+* Request options that lead to the error.
+*/
+request;
+/**
+* Response object if a response was received
+*/
+response;
+constructor(message, statusCode, options) {
+super(message);
+this.name = "HttpError";
+this.status = Number.parseInt(statusCode);
+if (Number.isNaN(this.status)) {
+this.status = 0;
+}
+if ("response" in options) {
+this.response = options.response;
+}
+const requestCopy = Object.assign({}, options.request);
+if (options.request.headers.authorization) {
+requestCopy.headers = Object.assign({}, options.request.headers, {
+authorization: options.request.headers.authorization.replace(
+/(?<! ) .*$/,
+" [REDACTED]"
+)
+});
+}
+requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+this.request = requestCopy;
+}
+};
+
 // src/cli-errors.ts
 var SUPPORTED_PLATFORMS = [
 ["linux", "x64"],
@@ -88965,9 +88971,6 @@ var cliErrorsConfig = {
 cliErrorMessageCandidates: [
 new RegExp(
 "Query pack .* cannot be found\\. Check the spelling of the pack\\."
-),
-new RegExp(
-"is not a .ql file, .qls file, a directory, or a query pack specification."
 )
 ]
 },
@@ -89046,7 +89049,6 @@ var path9 = __toESM(require("path"));
 var core6 = __toESM(require_core());

 // src/config/db-config.ts
-var jsonschema = __toESM(require_lib2());
 var semver2 = __toESM(require_semver2());
 var PACK_IDENTIFIER_PATTERN = (function() {
 const alphaNumeric = "[a-z0-9]";
@@ -89063,8 +89065,8 @@ var path8 = __toESM(require("path"));
 var semver4 = __toESM(require_semver2());

 // src/defaults.json
-var bundleVersion = "codeql-bundle-v2.23.3";
-var cliVersion = "2.23.3";
+var bundleVersion = "codeql-bundle-v2.23.2";
+var cliVersion = "2.23.2";

 // src/overlay-database-utils.ts
 var fs5 = __toESM(require("fs"));
@@ -89284,7 +89286,7 @@ function formatDuration(durationMs) {

 // src/overlay-database-utils.ts
 var CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
-var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
+var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
 var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
 async function writeBaseDatabaseOidsFile(config, sourceRoot) {
 const gitFileOids = await getFileOidsUnderPath(sourceRoot);
@@ -89357,11 +89359,6 @@ var featureConfig = {
 envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
 minimumVersion: void 0
 },
-["analyze_use_new_upload" /* AnalyzeUseNewUpload */]: {
-defaultValue: false,
-envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
-minimumVersion: void 0
-},
 ["cleanup_trap_caches" /* CleanupTrapCaches */]: {
 defaultValue: false,
 envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -89533,11 +89530,6 @@ var featureConfig = {
 defaultValue: false,
 envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
 minimumVersion: "2.23.0"
-},
-["validate_db_config" /* ValidateDbConfig */]: {
-defaultValue: false,
-envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
-minimumVersion: void 0
 }
 };

@@ -89716,13 +89708,13 @@ async function getTarVersion() {
 }
 if (stdout.includes("GNU tar")) {
 const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
-if (!match?.[1]) {
+if (!match || !match[1]) {
 throw new Error("Failed to parse output of tar --version.");
 }
 return { type: "gnu", version: match[1] };
 } else if (stdout.includes("bsdtar")) {
 const match = stdout.match(/bsdtar ([0-9.]+)/);
-if (!match?.[1]) {
+if (!match || !match[1]) {
 throw new Error("Failed to parse output of tar --version.");
 }
 return { type: "bsd", version: match[1] };
@@ -90102,7 +90094,7 @@ function tryGetTagNameFromUrl(url2, logger) {
 return void 0;
 }
 const match = matches[matches.length - 1];
-if (match?.length !== 2) {
+if (match === null || match.length !== 2) {
 logger.debug(
 `Could not determine tag name for URL ${url2}. Matched ${JSON.stringify(
 match
@@ -90562,7 +90554,7 @@ async function shouldEnableIndirectTracing(codeql, config) {

 // src/codeql.ts
 var cachedCodeQL = void 0;
-var CODEQL_MINIMUM_VERSION = "2.17.6";
+var CODEQL_MINIMUM_VERSION = "2.16.6";
 var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6";
 var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
 var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
@@ -90606,9 +90598,9 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
 toolsVersion,
 zstdAvailability
 };
-} catch (rawError) {
-const e = wrapApiConfigurationError(rawError);
-const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
+} catch (e) {
+const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
+e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
 throw new ErrorClass(
 `Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `

@@ -90897,6 +90889,12 @@ ${output}`
 } else {
 codeqlArgs.push("--no-sarif-include-diagnostics");
 }
+if (!isSupportedToolsFeature(
+await this.getVersion(),
+"analysisSummaryV2Default" /* AnalysisSummaryV2IsDefault */
+)) {
+codeqlArgs.push("--new-analysis-summary");
+}
 codeqlArgs.push(databasePath);
 if (querySuitePaths) {
 codeqlArgs.push(...querySuitePaths);
@@ -92446,6 +92444,24 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
 );
 codeQL = initCodeQLResult.codeql;
 }
+if (!await codeQL.supportsFeature(
+"sarifMergeRunsFromEqualCategory" /* SarifMergeRunsFromEqualCategory */
+)) {
+await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
+logger.warning(
+"The CodeQL CLI does not support merging SARIF files. Merging files in the action."
+);
+if (await shouldShowCombineSarifFilesDeprecationWarning(
+sarifObjects,
+gitHubVersion
+)) {
+logger.warning(
+`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`
+);
+core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
+}
+return combineSarifFiles(sarifFiles, logger);
+}
 const baseTempDir = path14.resolve(tempDir, "combined-sarif");
 fs13.mkdirSync(baseTempDir, { recursive: true });
 const outputDirectory = fs13.mkdtempSync(path14.resolve(baseTempDir, "output-"));
@@ -92504,17 +92520,16 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
 logger.info("Successfully uploaded results");
 return response.data.id;
 } catch (e) {
-const httpError = asHTTPError(e);
-if (httpError !== void 0) {
-switch (httpError.status) {
+if (isHTTPError(e)) {
+switch (e.status) {
 case 403:
-core11.warning(httpError.message || GENERIC_403_MSG);
+core11.warning(e.message || GENERIC_403_MSG);
 break;
 case 404:
-core11.warning(httpError.message || GENERIC_404_MSG);
+core11.warning(e.message || GENERIC_404_MSG);
 break;
 default:
-core11.warning(httpError.message);
+core11.warning(e.message);
 break;
 }
 }
@@ -92636,7 +92651,7 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) {
 }
 logger.info(`Validating ${sarifFilePath}`);
 const schema2 = require_sarif_schema_2_1_0();
-const result = new jsonschema2.Validator().validate(sarif, schema2);
+const result = new jsonschema.Validator().validate(sarif, schema2);
 const warningAttributes = ["uri-reference", "uri"];
 const errors = (result.errors ?? []).filter(
 (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -92696,11 +92711,26 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
 }
 return payloadObj;
 }
-async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
-logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
+async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
+const sarifPaths = getSarifFilePaths(
+inputSarifPath,
+uploadTarget.sarifPredicate
+);
+return uploadSpecifiedFiles(
+sarifPaths,
+checkoutPath,
+category,
+features,
+logger,
+uploadTarget
+);
+}
+async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
+logger.startGroup(`Uploading ${uploadTarget.name} results`);
+logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
 const gitHubVersion = await getGitHubVersion();
 let sarif;
-category = analysis.fixCategory(logger, category);
+category = uploadTarget.fixCategory(logger, category);
 if (sarifPaths.length > 1) {
 for (const sarifPath of sarifPaths) {
 const parsedSarif = readSarifFile(sarifPath);
@@ -92728,72 +92758,28 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
 analysisKey,
 environment
 );
-return { sarif, analysisKey, environment };
-}
-async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
-const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
-if (outputPath !== void 0) {
-dumpSarifFile(
-JSON.stringify(postProcessingResults.sarif),
-outputPath,
-logger,
-uploadTarget
-);
-} else {
-logger.debug(`Not writing post-processed SARIF files.`);
-}
-}
-async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
-const sarifPaths = getSarifFilePaths(
-inputSarifPath,
-uploadTarget.sarifPredicate
-);
-return uploadSpecifiedFiles(
-sarifPaths,
-checkoutPath,
-category,
-features,
-logger,
-uploadTarget
-);
-}
-async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
-const processingResults = await postProcessSarifFiles(
-logger,
-features,
-checkoutPath,
-sarifPaths,
-category,
-uploadTarget
-);
-return uploadPostProcessedFiles(
-logger,
-checkoutPath,
-uploadTarget,
-processingResults
-);
-}
-async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
-logger.startGroup(`Uploading ${uploadTarget.name} results`);
-const sarif = postProcessingResults.sarif;
 const toolNames = getToolNames(sarif);
 logger.debug(`Validating that each SARIF run has a unique category`);
 validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
 logger.debug(`Serializing SARIF for upload`);
 const sarifPayload = JSON.stringify(sarif);
+const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
+if (dumpDir) {
+dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+}
 logger.debug(`Compressing serialized SARIF`);
 const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
 const checkoutURI = url.pathToFileURL(checkoutPath).href;
 const payload = buildPayload(
 await getCommitOid(checkoutPath),
 await getRef(),
-postProcessingResults.analysisKey,
+analysisKey,
 getRequiredEnvParam("GITHUB_WORKFLOW"),
 zippedSarif,
 getWorkflowRunID(),
 getWorkflowRunAttempt(),
 checkoutURI,
-postProcessingResults.environment,
+environment,
 toolNames,
 await determineBaseBranchHeadCommitOid()
 );
@@ -92824,14 +92810,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
 fs13.mkdirSync(outputDir, { recursive: true });
 } else if (!fs13.lstatSync(outputDir).isDirectory()) {
 throw new ConfigurationError(
-`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
+`The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}`
 );
 }
 const outputFile = path14.resolve(
 outputDir,
 `upload${uploadTarget.sarifExtension}`
 );
-logger.info(`Writing processed SARIF file to ${outputFile}`);
+logger.info(`Dumping processed SARIF file to ${outputFile}`);
 fs13.writeFileSync(outputFile, sarifPayload);
 }
 var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
@@ -92993,7 +92979,6 @@ function filterAlertsByDiffRange(logger, sarif) {
 getGroupedSarifFilePaths,
 getSarifFilePaths,
 populateRunAutomationDetails,
-postProcessSarifFiles,
 readSarifFile,
 shouldConsiderConfigurationError,
 shouldConsiderInvalidRequest,
@@ -93001,11 +92986,10 @@ function filterAlertsByDiffRange(logger, sarif) {
 throwIfCombineSarifFilesDisabled,
 uploadFiles,
 uploadPayload,
-uploadPostProcessedFiles,
+uploadSpecifiedFiles,
 validateSarifFileSchema,
 validateUniqueCategory,
-waitForProcessing,
-writePostProcessedFiles
+waitForProcessing
 });
 /*! Bundled license information:
1324  lib/upload-sarif-action-post.js  (generated)
File diff suppressed because it is too large
689  lib/upload-sarif-action.js  (generated)
File diff suppressed because it is too large
966  package-lock.json  (generated)
File diff suppressed because it is too large
11  package.json
@@ -1,6 +1,6 @@
 {
 "name": "codeql",
-"version": "4.31.1",
+"version": "4.30.9",
 "private": true,
 "description": "CodeQL action",
 "scripts": {
@@ -38,6 +38,7 @@
 "@octokit/request-error": "^7.0.1",
 "@schemastore/package": "0.0.10",
 "archiver": "^7.0.1",
+"check-disk-space": "^3.4.0",
 "console-log-level": "^1.4.1",
 "del": "^8.0.0",
 "fast-deep-equal": "^3.1.3",
@@ -47,7 +48,7 @@
 "jsonschema": "1.4.1",
 "long": "^5.3.2",
 "node-forge": "^1.3.1",
-"octokit": "^5.0.4",
+"octokit": "^5.0.3",
 "semver": "^7.7.3",
 "uuid": "^13.0.0"
 },
@@ -55,7 +56,7 @@
 "@ava/typescript": "6.0.0",
 "@eslint/compat": "^1.4.0",
 "@eslint/eslintrc": "^3.3.1",
-"@eslint/js": "^9.38.0",
+"@eslint/js": "^9.37.0",
 "@microsoft/eslint-formatter-sarif": "^3.1.0",
 "@octokit/types": "^15.0.0",
 "@types/archiver": "^6.0.3",
@@ -66,10 +67,10 @@
 "@types/node-forge": "^1.3.14",
 "@types/semver": "^7.7.1",
 "@types/sinon": "^17.0.4",
-"@typescript-eslint/eslint-plugin": "^8.46.1",
+"@typescript-eslint/eslint-plugin": "^8.46.0",
 "@typescript-eslint/parser": "^8.41.0",
 "ava": "^6.4.1",
-"esbuild": "^0.25.11",
+"esbuild": "^0.25.10",
 "eslint": "^8.57.1",
 "eslint-import-resolver-typescript": "^3.8.7",
 "eslint-plugin-filenames": "^1.3.2",
@@ -2,7 +2,6 @@ name: "Analyze: 'ref' and 'sha' from inputs"
 description: "Checks that specifying 'ref' and 'sha' as inputs works"
 versions: ["default"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -15,9 +15,10 @@ steps:
 if (allCodeqlVersions.length === 0) {
 throw new Error(`CodeQL could not be found in the toolcache`);
 }
-- id: setup-codeql
-uses: ./../action/setup-codeql
+- id: init
+uses: ./../action/init
 with:
+languages: javascript
 tools: ${{ steps.prepare-test.outputs.tools-url }}
 - name: Check CodeQL is installed within the toolcache
 uses: actions/github-script@v8
@@ -2,7 +2,6 @@ name: "Local CodeQL bundle"
 description: "Tests using a CodeQL bundle from a local file rather than a URL"
 versions: ["linked"]
 installGo: true
-installPython: true
 steps:
 - name: Fetch latest CodeQL bundle
 run: |
@@ -4,7 +4,6 @@ operatingSystems: ["macos", "ubuntu"]
 env:
 CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true
 installGo: true
-installPython: true
 steps:
 - name: Use Xcode 16
 if: runner.os == 'macOS' && matrix.version != 'nightly-latest'
@@ -3,7 +3,6 @@ description: "Checks that specifying packages using a combination of a config fi
 versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs
 installGo: true
 installNode: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -36,7 +36,6 @@ steps:
 with:
 output: "${{ runner.temp }}/results"
 upload-database: false
-post-processed-sarif-path: "${{ runner.temp }}/post-processed"
 - name: Upload security SARIF
 if: contains(matrix.analysis-kinds, 'code-scanning')
 uses: actions/upload-artifact@v4
@@ -53,14 +52,6 @@ steps:
 quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
 path: "${{ runner.temp }}/results/javascript.quality.sarif"
 retention-days: 7
-- name: Upload post-processed SARIF
-uses: actions/upload-artifact@v4
-with:
-name: |
-post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
-path: "${{ runner.temp }}/post-processed"
-retention-days: 7
-if-no-files-found: error
 - name: Check quality query does not appear in security SARIF
 if: contains(matrix.analysis-kinds, 'code-scanning')
 uses: actions/github-script@v8
@@ -6,7 +6,6 @@ versions:
 - linked
 - nightly-latest
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -6,7 +6,6 @@ versions:
 - linked
 - nightly-latest
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 id: init
@@ -2,7 +2,6 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs"
 description: "Checks that specifying 'ref' and 'sha' as inputs works"
 versions: ["default"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -3,7 +3,6 @@ description: "Checks that uploading SARIFs to the code quality endpoint works"
 versions: ["default"]
 analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
 installGo: true
-installPython: true
 steps:
 - uses: ./../action/init
 with:
@@ -2,7 +2,6 @@ name: "Use a custom `checkout_path`"
 description: "Checks that a custom `checkout_path` will find the proper commit_oid"
 versions: ["linked"]
 installGo: true
-installPython: true
 steps:
 # This ensures we don't accidentally use the original checkout for any part of the test.
 - name: Delete original checkout
@@ -117,7 +117,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
 steps.extend([
 {
 'name': 'Install Node.js',
-'uses': 'actions/setup-node@v6',
+'uses': 'actions/setup-node@v5',
 'with': {
 'node-version': '20.x',
 'cache': 'npm',
@@ -184,26 +184,6 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
 }
 })
-
-installPython = is_truthy(checkSpecification.get('installPython', ''))
-
-if installPython:
-basePythonVersionExpr = '3.13'
-workflowInputs['python-version'] = {
-'type': 'string',
-'description': 'The version of Python to install',
-'required': False,
-'default': basePythonVersionExpr,
-}
-
-steps.append({
-'name': 'Install Python',
-'if': 'matrix.version != \'nightly-latest\'',
-'uses': 'actions/setup-python@v6',
-'with': {
-'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
-}
-})

 # If container initialisation steps are present in the check specification,
 # make sure to execute them first.
 if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
@@ -1,39 +0,0 @@
-name: 'CodeQL: Setup'
-description: 'Installs the CodeQL CLI'
-author: 'GitHub'
-inputs:
-tools:
-description: >-
-By default, the Action will use the recommended version of the CodeQL
-Bundle to analyze your project. You can override this choice using this
-input. One of:
-
-- A local path to a CodeQL Bundle tarball, or
-- The URL of a CodeQL Bundle tarball GitHub release asset, or
-- A special value `linked` which uses the version of the CodeQL tools
-that the Action has been bundled with.
-- A special value `nightly` which uses the latest nightly version of the
-CodeQL tools. Note that this is unstable and not recommended for
-production use.
-
-If not specified, the Action will check in several places until it finds
-the CodeQL tools.
-required: false
-token:
-description: GitHub token to use for authenticating with this instance of GitHub.
-default: ${{ github.token }}
-required: false
-matrix:
-default: ${{ toJson(matrix) }}
-required: false
-external-repository-token:
-description: A token for fetching additional files from private repositories in the same GitHub instance that is running this action.
-required: false
-outputs:
-codeql-path:
-description: The path of the CodeQL binary that was installed.
-codeql-version:
-description: The version of the CodeQL binary that was installed.
-runs:
-using: node24
-main: '../lib/setup-codeql-action.js'
|||||||
import test from "ava";
|
import test from "ava";
|
||||||
import * as sinon from "sinon";
|
|
||||||
|
|
||||||
import * as actionsUtil from "./actions-util";
|
|
||||||
import {
|
import {
|
||||||
AnalysisKind,
|
AnalysisKind,
|
||||||
getAnalysisKinds,
|
|
||||||
parseAnalysisKinds,
|
parseAnalysisKinds,
|
||||||
supportedAnalysisKinds,
|
supportedAnalysisKinds,
|
||||||
} from "./analyses";
|
} from "./analyses";
|
||||||
import { getRunnerLogger } from "./logging";
|
|
||||||
import { setupTests } from "./testing-utils";
|
|
||||||
import { ConfigurationError } from "./util";
|
import { ConfigurationError } from "./util";
|
||||||
|
|
||||||
setupTests(test);
|
|
||||||
|
|
||||||
test("All known analysis kinds can be parsed successfully", async (t) => {
|
test("All known analysis kinds can be parsed successfully", async (t) => {
|
||||||
for (const analysisKind of supportedAnalysisKinds) {
|
for (const analysisKind of supportedAnalysisKinds) {
|
||||||
t.deepEqual(await parseAnalysisKinds(analysisKind), [analysisKind]);
|
t.deepEqual(await parseAnalysisKinds(analysisKind), [analysisKind]);
|
||||||
@@ -41,29 +34,3 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
 instanceOf: ConfigurationError,
 });
 });
-
-test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub
-.withArgs("analysis-kinds")
-.returns("code-scanning,code-quality");
-const result = await getAnalysisKinds(getRunnerLogger(true), true);
-t.assert(result.includes(AnalysisKind.CodeScanning));
-t.assert(result.includes(AnalysisKind.CodeQuality));
-});
-
-test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
-const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
-optionalInputStub.withArgs("quality-queries").returns("code-quality");
-const result = await getAnalysisKinds(getRunnerLogger(true), true);
-t.assert(result.includes(AnalysisKind.CodeScanning));
-t.assert(result.includes(AnalysisKind.CodeQuality));
-});
-
-test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
-const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
-requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
-await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
-});
@@ -1,8 +1,4 @@
-import {
-fixCodeQualityCategory,
-getOptionalInput,
-getRequiredInput,
-} from "./actions-util";
+import { fixCodeQualityCategory } from "./actions-util";
 import { Logger } from "./logging";
 import { ConfigurationError } from "./util";

@@ -45,55 +41,6 @@ export async function parseAnalysisKinds(
 );
 }
-
-// Used to avoid re-parsing the input after we have done it once.
-let cachedAnalysisKinds: AnalysisKind[] | undefined;
-
-/**
-* Initialises the analysis kinds for the analysis based on the `analysis-kinds` input.
-* This function will also use the deprecated `quality-queries` input as an indicator to enable `code-quality`.
-* If the `analysis-kinds` input cannot be parsed, a `ConfigurationError` is thrown.
-*
-* @param logger The logger to use.
-* @param skipCache For testing, whether to ignore the cached values (default: false).
-*
-* @returns The array of enabled analysis kinds.
-* @throws A `ConfigurationError` if the `analysis-kinds` input cannot be parsed.
-*/
-export async function getAnalysisKinds(
-logger: Logger,
-skipCache: boolean = false,
-): Promise<AnalysisKind[]> {
-if (!skipCache && cachedAnalysisKinds !== undefined) {
-return cachedAnalysisKinds;
-}
-
-cachedAnalysisKinds = await parseAnalysisKinds(
-getRequiredInput("analysis-kinds"),
-);
-
-// Warn that `quality-queries` is deprecated if there is an argument for it.
-const qualityQueriesInput = getOptionalInput("quality-queries");
-
-if (qualityQueriesInput !== undefined) {
-logger.warning(
-"The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
-"Use the `analysis-kinds` input to configure different analysis kinds instead.",
-);
-}
-
-// For backwards compatibility, add Code Quality to the enabled analysis kinds
-// if an input to `quality-queries` was specified. We should remove this once
-// `quality-queries` is no longer used.
-if (
-!cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
-qualityQueriesInput !== undefined
-) {
-cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
-}
-
-return cachedAnalysisKinds;
-}
-
 /** The queries to use for Code Quality analyses. */
 export const codeQualityQueries: string[] = ["code-quality"];

@@ -24,9 +24,6 @@ setupTests(test);
 // but the first test would fail.

 test("analyze action with RAM & threads from environment variables", async (t) => {
-// This test frequently times out on Windows with the default timeout, so we bump
-// it a bit to 20s.
-t.timeout(1000 * 20);
 await util.withTmpDir(async (tmpDir) => {
 process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
@@ -24,7 +24,6 @@ setupTests(test);
 // but the first test would fail.

 test("analyze action with RAM & threads from action inputs", async (t) => {
-t.timeout(1000 * 20);
 await util.withTmpDir(async (tmpDir) => {
 process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
 process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
@@ -52,7 +52,6 @@ import {
 } from "./trap-caching";
 import * as uploadLib from "./upload-lib";
 import { UploadResult } from "./upload-lib";
-import { postProcessAndUploadSarif } from "./upload-sarif";
 import * as util from "./util";

 interface AnalysisStatusReport
@@ -212,9 +211,7 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {

 async function run() {
 const startedAt = new Date();
-let uploadResults:
-| Partial<Record<analyses.AnalysisKind, UploadResult>>
-| undefined = undefined;
+let uploadResult: UploadResult | undefined = undefined;
 let runStats: QueriesStatusReport | undefined = undefined;
 let config: Config | undefined = undefined;
 let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
@@ -344,67 +341,31 @@ async function run() {
 }
 core.setOutput("db-locations", dbLocations);
 core.setOutput("sarif-output", path.resolve(outputDir));
-const uploadKind = actionsUtil.getUploadValue(
-actionsUtil.getOptionalInput("upload"),
-);
-if (runStats) {
-const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
-const category = actionsUtil.getOptionalInput("category");
-
-if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
-uploadResults = await postProcessAndUploadSarif(
-logger,
-features,
-uploadKind,
-checkoutPath,
-outputDir,
-category,
-actionsUtil.getOptionalInput("post-processed-sarif-path"),
-);
-} else if (uploadKind === "always") {
-uploadResults = {};
-
+const uploadInput = actionsUtil.getOptionalInput("upload");
+if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
 if (isCodeScanningEnabled(config)) {
-uploadResults[analyses.AnalysisKind.CodeScanning] =
-await uploadLib.uploadFiles(
+uploadResult = await uploadLib.uploadFiles(
 outputDir,
-checkoutPath,
-category,
+actionsUtil.getRequiredInput("checkout_path"),
+actionsUtil.getOptionalInput("category"),
 features,
 logger,
 analyses.CodeScanning,
 );
+core.setOutput("sarif-id", uploadResult.sarifID);
 }

 if (isCodeQualityEnabled(config)) {
-uploadResults[analyses.AnalysisKind.CodeQuality] =
-await uploadLib.uploadFiles(
+const analysis = analyses.CodeQuality;
+const qualityUploadResult = await uploadLib.uploadFiles(
 outputDir,
-checkoutPath,
-category,
+actionsUtil.getRequiredInput("checkout_path"),
+actionsUtil.getOptionalInput("category"),
 features,
 logger,
-analyses.CodeQuality,
-);
-}
-} else {
-uploadResults = {};
-logger.info("Not uploading results");
-}
-
-// Set the SARIF id outputs only if we have results for them, to avoid
-// having keys with empty values in the action output.
-if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
-core.setOutput(
-"sarif-id",
-uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
-);
-}
-if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
-core.setOutput(
-"quality-sarif-id",
-uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
+analysis,
 );
+core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
 }
 } else {
 logger.info("Not uploading results");
@@ -447,12 +408,12 @@ async function run() {
|
|||||||
if (util.isInTestMode()) {
|
if (util.isInTestMode()) {
|
||||||
logger.debug("In test mode. Waiting for processing is disabled.");
|
logger.debug("In test mode. Waiting for processing is disabled.");
|
||||||
} else if (
|
} else if (
|
||||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
|
uploadResult !== undefined &&
|
||||||
actionsUtil.getRequiredInput("wait-for-processing") === "true"
|
actionsUtil.getRequiredInput("wait-for-processing") === "true"
|
||||||
) {
|
) {
|
||||||
await uploadLib.waitForProcessing(
|
await uploadLib.waitForProcessing(
|
||||||
getRepositoryNwo(),
|
getRepositoryNwo(),
|
||||||
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
|
uploadResult.sarifID,
|
||||||
getActionsLogger(),
|
getActionsLogger(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -489,16 +450,13 @@ async function run() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (
|
if (runStats && uploadResult) {
|
||||||
runStats !== undefined &&
|
|
||||||
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
|
|
||||||
) {
|
|
||||||
await sendStatusReport(
|
await sendStatusReport(
|
||||||
startedAt,
|
startedAt,
|
||||||
config,
|
config,
|
||||||
{
|
{
|
||||||
...runStats,
|
...runStats,
|
||||||
...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
|
...uploadResult.statusReport,
|
||||||
},
|
},
|
||||||
undefined,
|
undefined,
|
||||||
trapCacheUploadTime,
|
trapCacheUploadTime,
|
||||||
@@ -508,7 +466,7 @@ async function run() {
|
|||||||
dependencyCacheResults,
|
dependencyCacheResults,
|
||||||
logger,
|
logger,
|
||||||
);
|
);
|
||||||
} else if (runStats !== undefined) {
|
} else if (runStats) {
|
||||||
await sendStatusReport(
|
await sendStatusReport(
|
||||||
startedAt,
|
startedAt,
|
||||||
config,
|
config,
|
||||||
@@ -4,8 +4,10 @@ import * as path from "path";
 import test from "ava";
 import * as sinon from "sinon";
 
+import * as actionsUtil from "./actions-util";
 import { CodeQuality, CodeScanning } from "./analyses";
 import {
+  exportedForTesting,
   runQueries,
   defaultSuites,
   resolveQuerySuiteAlias,

@@ -129,6 +131,204 @@ test("status report fields", async (t) => {
   });
 });
 
+function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
+  sinon
+    .stub(actionsUtil, "getRequiredInput")
+    .withArgs("checkout_path")
+    .returns("/checkout/path");
+  return exportedForTesting.getDiffRanges(
+    {
+      filename: "test.txt",
+      changes,
+      patch: patch?.join("\n"),
+    },
+    getRunnerLogger(true),
+  );
+}
+
+test("getDiffRanges: file unchanged", async (t) => {
+  const diffRanges = runGetDiffRanges(0, undefined);
+  t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: file diff too large", async (t) => {
+  const diffRanges = runGetDiffRanges(1000000, undefined);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 0,
+      endLine: 0,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with single addition range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,6 +50,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 54,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with single deletion range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,8 +50,6 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    "-2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, []);
+});
+
+test("getDiffRanges: diff thunk with single update range", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,7 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 53,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with addition ranges", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,9 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    " c",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 53,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 55,
+      endLine: 55,
+    },
+  ]);
+});
+
+test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,7 +50,7 @@",
+    " a",
+    " b",
+    " c",
+    "-1",
+    " d",
+    "-2",
+    "+3",
+    " e",
+    " f",
+    "+4",
+    "+5",
+    " g",
+    " h",
+    " i",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 54,
+      endLine: 54,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 57,
+      endLine: 58,
+    },
+  ]);
+});
+
+test("getDiffRanges: multiple diff thunks", async (t) => {
+  const diffRanges = runGetDiffRanges(2, [
+    "@@ -30,6 +50,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+    "@@ -130,6 +150,8 @@",
+    " a",
+    " b",
+    " c",
+    "+1",
+    "+2",
+    " d",
+    " e",
+    " f",
+  ]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 53,
+      endLine: 54,
+    },
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 153,
+      endLine: 154,
+    },
+  ]);
+});
+
+test("getDiffRanges: no diff context lines", async (t) => {
+  const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
+  t.deepEqual(diffRanges, [
+    {
+      path: "/checkout/path/test.txt",
+      startLine: 50,
+      endLine: 51,
+    },
+  ]);
+});
+
+test("getDiffRanges: malformed thunk header", async (t) => {
+  const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
+  t.deepEqual(diffRanges, undefined);
+});
+
 test("resolveQuerySuiteAlias", (t) => {
   // default query suite names should resolve to something language-specific ending in `.qls`.
   for (const suite of defaultSuites) {
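The expected line numbers in these tests follow from the hunk-header grammar: a valid header carries a leading "-" before the old-file position, so "@@ 30 +50,2 @@" fails to parse and the computation bails out with undefined. A quick check of that grammar against the regex used by the implementation further down (copied from the src/analyze.ts diff below; the console.log calls are only for illustration):

// The hunk header regex from the getDiffRanges implementation shown below.
const hunkHeader = /^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/;

// "+50,8" means the new side starts at line 50; capture group 1 is "50".
console.log(hunkHeader.exec("@@ -30,6 +50,8 @@")?.[1]); // "50"
console.log(hunkHeader.exec("@@ -30 +50,2 @@")?.[1]); // "50" (the count is optional)
console.log(hunkHeader.exec("@@ 30 +50,2 @@")); // null: missing "-", malformed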
192  src/analyze.ts

@@ -6,8 +6,13 @@ import * as io from "@actions/io";
 import * as del from "del";
 import * as yaml from "js-yaml";
 
-import { getTemporaryDirectory, PullRequestBranches } from "./actions-util";
+import {
+  getRequiredInput,
+  getTemporaryDirectory,
+  PullRequestBranches,
+} from "./actions-util";
 import * as analyses from "./analyses";
+import { getApiClient } from "./api-client";
 import { setupCppAutobuild } from "./autobuild";
 import { type CodeQL } from "./codeql";
 import * as configUtils from "./config-utils";

@@ -16,13 +21,13 @@ import { addDiagnostic, makeDiagnostic } from "./diagnostics";
 import {
   DiffThunkRange,
   writeDiffRangesJsonFile,
-  getPullRequestEditedDiffRanges,
 } from "./diff-informed-analysis-utils";
 import { EnvVar } from "./environment";
 import { FeatureEnablement, Feature } from "./feature-flags";
 import { KnownLanguage, Language } from "./languages";
 import { Logger, withGroupAsync } from "./logging";
 import { OverlayDatabaseMode } from "./overlay-database-utils";
+import { getRepositoryNwoFromEnv } from "./repository";
 import { DatabaseCreationTimings, EventReport } from "./status-report";
 import { endTracingForCluster } from "./tracer-config";
 import * as util from "./util";

@@ -308,6 +313,185 @@ export async function setupDiffInformedQueryRun(
   );
 }
 
+/**
+ * Return the file line ranges that were added or modified in the pull request.
+ *
+ * @param branches The base and head branches of the pull request.
+ * @param logger
+ * @returns An array of tuples, where each tuple contains the absolute path of a
+ * file, the start line and the end line (both 1-based and inclusive) of an
+ * added or modified range in that file. Returns `undefined` if the action was
+ * not triggered by a pull request or if there was an error.
+ */
+async function getPullRequestEditedDiffRanges(
+  branches: PullRequestBranches,
+  logger: Logger,
+): Promise<DiffThunkRange[] | undefined> {
+  const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
+  if (fileDiffs === undefined) {
+    return undefined;
+  }
+  if (fileDiffs.length >= 300) {
+    // The "compare two commits" API returns a maximum of 300 changed files. If
+    // we see that many changed files, it is possible that there could be more,
+    // with the rest being truncated. In this case, we should not attempt to
+    // compute the diff ranges, as the result would be incomplete.
+    logger.warning(
+      `Cannot retrieve the full diff because there are too many ` +
+        `(${fileDiffs.length}) changed files in the pull request.`,
+    );
+    return undefined;
+  }
+  const results: DiffThunkRange[] = [];
+  for (const filediff of fileDiffs) {
+    const diffRanges = getDiffRanges(filediff, logger);
+    if (diffRanges === undefined) {
+      return undefined;
+    }
+    results.push(...diffRanges);
+  }
+  return results;
+}
+
+/**
+ * This interface is an abbreviated version of the file diff object returned by
+ * the GitHub API.
+ */
+interface FileDiff {
+  filename: string;
+  changes: number;
+  // A patch may be absent if the file is binary, if the file diff is too large,
+  // or if the file is unchanged.
+  patch?: string | undefined;
+}
+
+async function getFileDiffsWithBasehead(
+  branches: PullRequestBranches,
+  logger: Logger,
+): Promise<FileDiff[] | undefined> {
+  // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
+  // to GITHUB_REPOSITORY.
+  const repositoryNwo = getRepositoryNwoFromEnv(
+    "CODE_SCANNING_REPOSITORY",
+    "GITHUB_REPOSITORY",
+  );
+  const basehead = `${branches.base}...${branches.head}`;
+  try {
+    const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
+      {
+        owner: repositoryNwo.owner,
+        repo: repositoryNwo.repo,
+        basehead,
+        per_page: 1,
+      },
+    );
+    logger.debug(
+      `Response from compareCommitsWithBasehead(${basehead}):` +
+        `\n${JSON.stringify(response, null, 2)}`,
+    );
+    return response.data.files;
+  } catch (error: any) {
+    if (error.status) {
+      logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
+      logger.debug(
+        `Error running compareCommitsWithBasehead(${basehead}):` +
+          `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
+          `\nError Response: ${JSON.stringify(error.response, null, 2)}`,
+      );
+      return undefined;
+    } else {
+      throw error;
+    }
+  }
+}
+
+function getDiffRanges(
+  fileDiff: FileDiff,
+  logger: Logger,
+): DiffThunkRange[] | undefined {
+  // Diff-informed queries expect the file path to be absolute. CodeQL always
+  // uses forward slashes as the path separator, so on Windows we need to
+  // replace any backslashes with forward slashes.
+  const filename = path
+    .join(getRequiredInput("checkout_path"), fileDiff.filename)
+    .replaceAll(path.sep, "/");
+
+  if (fileDiff.patch === undefined) {
+    if (fileDiff.changes === 0) {
+      // There are situations where a changed file legitimately has no diff.
+      // For example, the file may be a binary file, or that the file may have
+      // been renamed with no changes to its contents. In these cases, the
+      // file would be reported as having 0 changes, and we can return an empty
+      // array to indicate no diff range in this file.
+      return [];
+    }
+    // If a file is reported to have nonzero changes but no patch, that may be
+    // due to the file diff being too large. In this case, we should fall back
+    // to a special diff range that covers the entire file.
+    return [
+      {
+        path: filename,
+        startLine: 0,
+        endLine: 0,
+      },
+    ];
+  }
+
+  // The 1-based file line number of the current line
+  let currentLine = 0;
+  // The 1-based file line number that starts the current range of added lines
+  let additionRangeStartLine: number | undefined = undefined;
+  const diffRanges: DiffThunkRange[] = [];
+
+  const diffLines = fileDiff.patch.split("\n");
+  // Adding a fake context line at the end ensures that the following loop will
+  // always terminate the last range of added lines.
+  diffLines.push(" ");
+
+  for (const diffLine of diffLines) {
+    if (diffLine.startsWith("-")) {
+      // Ignore deletions completely -- we do not even want to consider them when
+      // calculating consecutive ranges of added lines.
+      continue;
+    }
+    if (diffLine.startsWith("+")) {
+      if (additionRangeStartLine === undefined) {
+        additionRangeStartLine = currentLine;
+      }
+      currentLine++;
+      continue;
+    }
+    if (additionRangeStartLine !== undefined) {
+      // Any line that does not start with a "+" or "-" terminates the current
+      // range of added lines.
+      diffRanges.push({
+        path: filename,
+        startLine: additionRangeStartLine,
+        endLine: currentLine - 1,
+      });
+      additionRangeStartLine = undefined;
+    }
+    if (diffLine.startsWith("@@ ")) {
+      // A new hunk header line resets the current line number.
+      const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
+      if (match === null) {
+        logger.warning(
+          `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
+        );
+        return undefined;
+      }
+      currentLine = parseInt(match[1], 10);
+      continue;
+    }
+    if (diffLine.startsWith(" ")) {
+      // An unchanged context line advances the current line number.
+      currentLine++;
+      continue;
+    }
+  }
+  return diffRanges;
+}
+
 /**
  * Create an extension pack in the temporary directory that contains the file
  * line ranges that were added or modified in the pull request.

@@ -738,3 +922,7 @@ export async function warnIfGoInstalledAfterInit(
     }
   }
 }
+
+export const exportedForTesting = {
+  getDiffRanges,
+};
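getFileDiffsWithBasehead above uses the "compare two commits" endpoint with the three-dot basehead syntax. A minimal standalone sketch of the same call with plain Octokit, outside of the action's getApiClient wrapper (the token handling and function name here are illustrative, not the action's code):

// Sketch: fetch per-file patches for a pull request, mirroring
// getFileDiffsWithBasehead above. Each returned entry carries filename,
// changes, and (when the diff is small enough) a patch string.
import { Octokit } from "@octokit/rest";

async function fetchFileDiffs(
  owner: string,
  repo: string,
  base: string,
  head: string,
) {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const response = await octokit.rest.repos.compareCommitsWithBasehead({
    owner,
    repo,
    // "base...head" is the three-dot basehead syntax used above.
    basehead: `${base}...${head}`,
  });
  return response.data.files;
}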
@@ -7,12 +7,12 @@ import { getActionVersion, getRequiredInput } from "./actions-util";
 import { Logger } from "./logging";
 import { getRepositoryNwo, RepositoryNwo } from "./repository";
 import {
-  asHTTPError,
   ConfigurationError,
   getRequiredEnvParam,
   GITHUB_DOTCOM_URL,
   GitHubVariant,
   GitHubVersion,
+  isHTTPError,
   parseGitHubUrl,
   parseMatrixInput,
 } from "./util";

@@ -280,29 +280,22 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
 }
 
 export function wrapApiConfigurationError(e: unknown) {
-  const httpError = asHTTPError(e);
-  if (httpError !== undefined) {
-    if (
-      [
-        /API rate limit exceeded/,
-        /commit not found/,
-        /Resource not accessible by integration/,
-        /ref .* not found in this repository/,
-      ].some((pattern) => pattern.test(httpError.message))
-    ) {
-      return new ConfigurationError(httpError.message);
-    }
-    if (
-      httpError.message.includes("Bad credentials") ||
-      httpError.message.includes("Not Found")
-    ) {
+  if (isHTTPError(e)) {
+    if (
+      e.message.includes("API rate limit exceeded for installation") ||
+      e.message.includes("commit not found") ||
+      e.message.includes("Resource not accessible by integration") ||
+      /ref .* not found in this repository/.test(e.message)
+    ) {
+      return new ConfigurationError(e.message);
+    } else if (
+      e.message.includes("Bad credentials") ||
+      e.message.includes("Not Found")
+    ) {
       return new ConfigurationError(
         "Please check that your token is valid and has the required permissions: contents: read, security-events: write",
       );
     }
-    if (httpError.status === 429) {
-      return new ConfigurationError("API rate limit exceeded");
-    }
   }
   return e;
 }
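The left-hand side of the diff above keeps the user-error messages in a single table of regular expressions and tests them in one place. A minimal sketch of that pattern-table approach under stated assumptions (ConfigurationError is a stand-in class here, and the patterns are the ones listed in the diff):

// Sketch: classify an error message as a user/configuration problem by
// matching it against a table of known patterns.
class ConfigurationError extends Error {}

const userErrorPatterns = [
  /API rate limit exceeded/,
  /commit not found/,
  /Resource not accessible by integration/,
  /ref .* not found in this repository/,
];

function wrapError(message: string): Error {
  return userErrorPatterns.some((pattern) => pattern.test(message))
    ? new ConfigurationError(message)
    : new Error(message);
}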
@@ -310,20 +310,6 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
   t.true(wrappedError instanceof ConfigurationError);
 });
 
-test("wrapCliConfigurationError - unknown query file", (t) => {
-  const commandError = new CommandInvocationError(
-    "codeql",
-    ["database", "init"],
-    2,
-    "my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
-  );
-  const cliError = new CliError(commandError);
-
-  const wrappedError = wrapCliConfigurationError(cliError);
-
-  t.true(wrappedError instanceof ConfigurationError);
-});
-
 test("wrapCliConfigurationError - pack missing auth", (t) => {
   const commandError = new CommandInvocationError(
     "codeql",

@@ -264,9 +264,6 @@ export const cliErrorsConfig: Record<
     new RegExp(
       "Query pack .* cannot be found\\. Check the spelling of the pack\\.",
     ),
-    new RegExp(
-      "is not a .ql file, .qls file, a directory, or a query pack specification.",
-    ),
   ],
 },
 [CliConfigErrorCategory.PackMissingAuth]: {
@@ -36,6 +36,7 @@ import {
   createTestConfig,
 } from "./testing-utils";
 import { ToolsDownloadStatusReport } from "./tools-download";
+import { ToolsFeature } from "./tools-features";
 import * as util from "./util";
 import { initializeEnvironment } from "./util";
 

@@ -869,6 +870,84 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
   });
 });
 
+const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
+  {
+    codeqlVersion: makeVersionInfo("2.15.0", {
+      [ToolsFeature.AnalysisSummaryV2IsDefault]: true,
+    }),
+    githubVersion: {
+      type: util.GitHubVariant.DOTCOM,
+    },
+    flagPassed: false,
+    negativeFlagPassed: false,
+  },
+  {
+    codeqlVersion: makeVersionInfo("2.15.0"),
+    githubVersion: {
+      type: util.GitHubVariant.DOTCOM,
+    },
+    flagPassed: true,
+    negativeFlagPassed: false,
+  },
+  {
+    codeqlVersion: makeVersionInfo("2.15.0"),
+    githubVersion: {
+      type: util.GitHubVariant.GHES,
+      version: "3.10.0",
+    },
+    flagPassed: true,
+    negativeFlagPassed: false,
+  },
+];
+
+for (const {
+  codeqlVersion,
+  flagPassed,
+  githubVersion,
+  negativeFlagPassed,
+} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
+  test(`database interpret-results passes ${
+    flagPassed
+      ? "--new-analysis-summary"
+      : negativeFlagPassed
+        ? "--no-new-analysis-summary"
+        : "nothing"
+  } for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
+    util.GitHubVariant[githubVersion.type]
+  } ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
+    const runnerConstructorStub = stubToolRunnerConstructor();
+    const codeqlObject = await codeql.getCodeQLForTesting();
+    sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
+    // io throws because of the test CodeQL object.
+    sinon.stub(io, "which").resolves("");
+    await codeqlObject.databaseInterpretResults(
+      "",
+      [],
+      "",
+      "",
+      "",
+      "-v",
+      undefined,
+      "",
+      Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
+      createFeatures([]),
+    );
+    const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
+    t.is(
+      actualArgs.includes("--new-analysis-summary"),
+      flagPassed,
+      `--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
+    );
+    t.is(
+      actualArgs.includes("--no-new-analysis-summary"),
+      negativeFlagPassed,
+      `--no-new-analysis-summary should${
+        negativeFlagPassed ? "" : "n't"
+      } be passed`,
+    );
+  });
+}
+
 test("runTool summarizes several fatal errors", async (t) => {
   const heapError =
     "A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
@@ -3,6 +3,7 @@ import * as path from "path";
 
 import * as core from "@actions/core";
 import * as toolrunner from "@actions/exec/lib/toolrunner";
+import { RequestError } from "@octokit/request-error";
 import * as yaml from "js-yaml";
 
 import {

@@ -267,7 +268,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
  * The version flags below can be used to conditionally enable certain features
  * on versions newer than this.
  */
-const CODEQL_MINIMUM_VERSION = "2.17.6";
+const CODEQL_MINIMUM_VERSION = "2.16.6";
 
 /**
  * This version will shortly become the oldest version of CodeQL that the Action will run with.

@@ -370,11 +371,11 @@ export async function setupCodeQL(
       toolsVersion,
       zstdAvailability,
     };
-  } catch (rawError) {
-    const e = api.wrapApiConfigurationError(rawError);
+  } catch (e) {
     const ErrorClass =
       e instanceof util.ConfigurationError ||
-      (e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
+      (e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
+      (e instanceof RequestError && e.status === 429) // rate limited
         ? util.ConfigurationError
         : Error;
 

@@ -860,6 +861,14 @@ export async function getCodeQLForCmd(
       } else {
         codeqlArgs.push("--no-sarif-include-diagnostics");
       }
+      if (
+        !isSupportedToolsFeature(
+          await this.getVersion(),
+          ToolsFeature.AnalysisSummaryV2IsDefault,
+        )
+      ) {
+        codeqlArgs.push("--new-analysis-summary");
+      }
       codeqlArgs.push(databasePath);
       if (querySuitePaths) {
         codeqlArgs.push(...querySuitePaths);
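The --new-analysis-summary handling above is a version-gated flag: the flag is passed only when the resolved CLI does not already default to the new behavior. A minimal sketch of that gating, assuming a simplified version-info shape (isSupportedToolsFeature here is modeled on the action's usage, not copied from it):

// Sketch: pass a CLI flag only when the tool does not already default to
// the behavior, as in the databaseInterpretResults diff above.
type VersionInfo = { version: string; features?: Record<string, boolean> };

function isSupportedToolsFeature(v: VersionInfo, feature: string): boolean {
  return v.features?.[feature] === true;
}

function buildInterpretArgs(v: VersionInfo, databasePath: string): string[] {
  const args: string[] = [];
  if (!isSupportedToolsFeature(v, "analysisSummaryV2Default")) {
    // Older CLIs need the flag to opt in to the new summary format.
    args.push("--new-analysis-summary");
  }
  args.push(databasePath);
  return args;
}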
@@ -49,9 +49,10 @@ function createTestInitConfigInputs(
   return Object.assign(
     {},
     {
-      analysisKinds: [AnalysisKind.CodeScanning],
+      analysisKindsInput: "code-scanning",
       languagesInput: undefined,
       queriesInput: undefined,
+      qualityQueriesInput: undefined,
       packsInput: undefined,
       configFile: undefined,
       dbLocation: undefined,

@@ -148,7 +149,6 @@ test("load empty config", async (t) => {
     });
 
     const config = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput: languages,
         repository: { owner: "github", repo: "example" },

@@ -188,9 +188,8 @@ test("load code quality config", async (t) => {
     });
 
     const config = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
-        analysisKinds: [AnalysisKind.CodeQuality],
+        analysisKindsInput: "code-quality",
         languagesInput: languages,
         repository: { owner: "github", repo: "example" },
         tempDir,

@@ -273,9 +272,8 @@ test("initActionState doesn't throw if there are queries configured in the repos
 
   await t.notThrowsAsync(async () => {
     const config = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
-        analysisKinds: [AnalysisKind.CodeQuality],
+        analysisKindsInput: "code-quality",
         languagesInput: languages,
         repository: { owner: "github", repo: "example" },
         tempDir,

@@ -312,7 +310,6 @@ test("loading a saved config produces the same config", async (t) => {
     t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);
 
     const config1 = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput: "javascript,python",
         tempDir,

@@ -364,7 +361,6 @@ test("loading config with version mismatch throws", async (t) => {
       .returns("does-not-exist");
 
     const config = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput: "javascript,python",
         tempDir,

@@ -393,7 +389,6 @@ test("load input outside of workspace", async (t) => {
   return await withTmpDir(async (tempDir) => {
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           configFile: "../input",
           tempDir,

@@ -421,7 +416,6 @@ test("load non-local input with invalid repo syntax", async (t) => {
 
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           configFile,
           tempDir,

@@ -450,7 +444,6 @@ test("load non-existent input", async (t) => {
 
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           languagesInput,
           configFile,

@@ -534,7 +527,6 @@ test("load non-empty input", async (t) => {
     const configFilePath = createConfigFile(inputFileContents, tempDir);
 
     const actualConfig = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput,
         buildModeInput: "none",

@@ -591,7 +583,6 @@ test("Using config input and file together, config input should be used.", async
     const languagesInput = "javascript";
 
     const config = await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput,
         configFile: configFilePath,

@@ -642,7 +633,6 @@ test("API client used when reading remote config", async (t) => {
     const languagesInput = "javascript";
 
     await configUtils.initConfig(
-      createFeatures([]),
       createTestInitConfigInputs({
         languagesInput,
         configFile,

@@ -663,7 +653,6 @@ test("Remote config handles the case where a directory is provided", async (t) =
     const repoReference = "octo-org/codeql-config/config.yaml@main";
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           configFile: repoReference,
           tempDir,

@@ -692,7 +681,6 @@ test("Invalid format of remote config handled correctly", async (t) => {
     const repoReference = "octo-org/codeql-config/config.yaml@main";
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           configFile: repoReference,
           tempDir,

@@ -722,7 +710,6 @@ test("No detected languages", async (t) => {
 
     try {
       await configUtils.initConfig(
-        createFeatures([]),
         createTestInitConfigInputs({
           tempDir,
           codeql,

@@ -745,7 +732,6 @@ test("Unknown languages", async (t) => {
 
     try {
      await configUtils.initConfig(
-        createFeatures([]),
        createTestInitConfigInputs({
          languagesInput,
          tempDir,
@@ -11,6 +11,7 @@ import {
   CodeQuality,
   codeQualityQueries,
   CodeScanning,
+  parseAnalysisKinds,
 } from "./analyses";
 import * as api from "./api-client";
 import { CachingKind, getCachingKind } from "./caching-utils";

@@ -19,7 +20,6 @@ import {
   calculateAugmentation,
   ExcludeQueryFilter,
   generateCodeScanningConfig,
-  parseUserConfig,
   UserConfig,
 } from "./config/db-config";
 import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";

@@ -373,8 +373,10 @@ export async function getRawLanguages(
 
 /** Inputs required to initialize a configuration. */
 export interface InitConfigInputs {
+  analysisKindsInput: string;
   languagesInput: string | undefined;
   queriesInput: string | undefined;
+  qualityQueriesInput: string | undefined;
   packsInput: string | undefined;
   configFile: string | undefined;
   dbLocation: string | undefined;

@@ -394,7 +396,6 @@ export interface InitConfigInputs {
   apiDetails: api.GitHubApiCombinedDetails;
   features: FeatureEnablement;
   repositoryProperties: RepositoryProperties;
-  analysisKinds: AnalysisKind[];
   logger: Logger;
 }
 

@@ -404,8 +405,10 @@ export interface InitConfigInputs {
  */
 export async function initActionState(
   {
+    analysisKindsInput,
     languagesInput,
     queriesInput,
+    qualityQueriesInput,
     packsInput,
     buildModeInput,
     dbLocation,

@@ -421,11 +424,22 @@ export async function initActionState(
     githubVersion,
     features,
     repositoryProperties,
-    analysisKinds,
     logger,
   }: InitConfigInputs,
   userConfig: UserConfig,
 ): Promise<Config> {
+  const analysisKinds = await parseAnalysisKinds(analysisKindsInput);
+
+  // For backwards compatibility, add Code Quality to the enabled analysis kinds
+  // if an input to `quality-queries` was specified. We should remove this once
+  // `quality-queries` is no longer used.
+  if (
+    !analysisKinds.includes(AnalysisKind.CodeQuality) &&
+    qualityQueriesInput !== undefined
+  ) {
+    analysisKinds.push(AnalysisKind.CodeQuality);
+  }
+
   const languages = await getLanguages(
     codeql,
     languagesInput,

@@ -526,12 +540,10 @@ async function downloadCacheWithTime(
 }
 
 async function loadUserConfig(
-  logger: Logger,
   configFile: string,
   workspacePath: string,
   apiDetails: api.GitHubApiCombinedDetails,
   tempDir: string,
-  validateConfig: boolean,
 ): Promise<UserConfig> {
   if (isLocal(configFile)) {
     if (configFile !== userConfigFromActionPath(tempDir)) {

@@ -544,14 +556,9 @@ async function loadUserConfig(
         );
       }
     }
-    return getLocalConfig(logger, configFile, validateConfig);
+    return getLocalConfig(configFile);
   } else {
-    return await getRemoteConfig(
-      logger,
-      configFile,
-      apiDetails,
-      validateConfig,
-    );
+    return await getRemoteConfig(configFile, apiDetails);
   }
 }
 

@@ -787,10 +794,7 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
  * This will parse the config from the user input if present, or generate
  * a default config. The parsed config is then stored to a known location.
  */
-export async function initConfig(
-  features: FeatureEnablement,
-  inputs: InitConfigInputs,
-): Promise<Config> {
+export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
   const { logger, tempDir } = inputs;
 
   // if configInput is set, it takes precedence over configFile

@@ -810,14 +814,11 @@ export async function initConfig(
     logger.debug("No configuration file was provided");
   } else {
     logger.debug(`Using configuration file: ${inputs.configFile}`);
-    const validateConfig = await features.getValue(Feature.ValidateDbConfig);
     userConfig = await loadUserConfig(
-      logger,
       inputs.configFile,
       inputs.workspacePath,
       inputs.apiDetails,
       tempDir,
-      validateConfig,
     );
   }
 

@@ -911,11 +912,7 @@ function isLocal(configPath: string): boolean {
   return configPath.indexOf("@") === -1;
 }
 
-function getLocalConfig(
-  logger: Logger,
-  configFile: string,
-  validateConfig: boolean,
-): UserConfig {
+function getLocalConfig(configFile: string): UserConfig {
   // Error if the file does not exist
   if (!fs.existsSync(configFile)) {
     throw new ConfigurationError(

@@ -923,19 +920,12 @@ function getLocalConfig(
     );
   }
 
-  return parseUserConfig(
-    logger,
-    configFile,
-    fs.readFileSync(configFile, "utf-8"),
-    validateConfig,
-  );
+  return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
 }
 
 async function getRemoteConfig(
-  logger: Logger,
   configFile: string,
   apiDetails: api.GitHubApiCombinedDetails,
-  validateConfig: boolean,
 ): Promise<UserConfig> {
   // retrieve the various parts of the config location, and ensure they're present
   const format = new RegExp(

@@ -943,7 +933,7 @@ async function getRemoteConfig(
   );
   const pieces = format.exec(configFile);
   // 5 = 4 groups + the whole expression
-  if (pieces?.groups === undefined || pieces.length < 5) {
+  if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
     throw new ConfigurationError(
       errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
     );

@@ -971,12 +961,9 @@ async function getRemoteConfig(
     );
   }
 
-  return parseUserConfig(
-    logger,
-    configFile,
-    Buffer.from(fileContents, "base64").toString("binary"),
-    validateConfig,
-  );
+  return yaml.load(
+    Buffer.from(fileContents, "base64").toString("binary"),
+  ) as UserConfig;
 }
 
 /**
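initActionState above now derives the analysis kinds from a raw string input plus the quality-queries backwards-compatibility rule. A minimal sketch of that parsing under stated assumptions: parseAnalysisKinds in the action may differ (for example in validation or error handling), and parseKinds, AnalysisKind, and the enum values here are illustrative stand-ins.

// Sketch: parse a comma-separated analysis-kinds input, then apply the
// legacy rule that an explicit quality-queries input opts into code quality.
enum AnalysisKind {
  CodeScanning = "code-scanning",
  CodeQuality = "code-quality",
}

function parseKinds(
  input: string,
  qualityQueriesInput?: string,
): AnalysisKind[] {
  const known = Object.values(AnalysisKind) as string[];
  const kinds = input
    .split(",")
    .map((k) => k.trim())
    .filter((k) => known.includes(k)) as AnalysisKind[];
  if (
    qualityQueriesInput !== undefined &&
    !kinds.includes(AnalysisKind.CodeQuality)
  ) {
    kinds.push(AnalysisKind.CodeQuality);
  }
  return kinds;
}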
@@ -2,13 +2,7 @@ import test, { ExecutionContext } from "ava";
 
 import { RepositoryProperties } from "../feature-flags/properties";
 import { KnownLanguage, Language } from "../languages";
-import { getRunnerLogger } from "../logging";
-import {
-  checkExpectedLogMessages,
-  getRecordingLogger,
-  LoggedMessage,
-} from "../testing-utils";
-import { ConfigurationError, prettyPrintPack } from "../util";
+import { prettyPrintPack } from "../util";
 
 import * as dbConfig from "./db-config";
 

@@ -397,111 +391,3 @@ test(
   {},
   /"a-pack-without-a-scope" is not a valid pack/,
 );
-
-test("parseUserConfig - successfully parses valid YAML", (t) => {
-  const result = dbConfig.parseUserConfig(
-    getRunnerLogger(true),
-    "test",
-    `
-paths-ignore:
-  - "some/path"
-queries:
-  - uses: foo
-some-unknown-option: true
-`,
-    true,
-  );
-  t.truthy(result);
-  if (t.truthy(result["paths-ignore"])) {
-    t.is(result["paths-ignore"].length, 1);
-    t.is(result["paths-ignore"][0], "some/path");
-  }
-  if (t.truthy(result["queries"])) {
-    t.is(result["queries"].length, 1);
-    t.deepEqual(result["queries"][0], { uses: "foo" });
-  }
-});
-
-test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
-  t.throws(
-    () =>
-      dbConfig.parseUserConfig(
-        getRunnerLogger(true),
-        "test",
-        `
-paths-ignore:
-  - "some/path"
-queries:
-  - foo
-`,
-        true,
-      ),
-    {
-      instanceOf: ConfigurationError,
-    },
-  );
-});
-
-test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
-  const loggedMessages: LoggedMessage[] = [];
-  const logger = getRecordingLogger(loggedMessages);
-
-  t.notThrows(() =>
-    dbConfig.parseUserConfig(
-      logger,
-      "test",
-      `
-query-filters:
-  - something
-  - include: foo
-  - exclude: bar
-`,
-      true,
-    ),
-  );
-});
-
-test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
-  const loggedMessages: LoggedMessage[] = [];
-  const logger = getRecordingLogger(loggedMessages);
-
-  t.throws(
-    () =>
-      dbConfig.parseUserConfig(
-        logger,
-        "test",
-        `
-paths-ignore:
-  - "some/path"
-queries: true
-`,
-        true,
-      ),
-    {
-      instanceOf: ConfigurationError,
-      message:
-        'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
-    },
-  );
-
-  const expectedMessages = ["instance.queries is not of a type(s) array"];
-  checkExpectedLogMessages(t, loggedMessages, expectedMessages);
-});
-
-test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
-  const loggedMessages: LoggedMessage[] = [];
-  const logger = getRecordingLogger(loggedMessages);
-
-  t.notThrows(() =>
-    dbConfig.parseUserConfig(
-      logger,
-      "test",
-      `
-paths-ignore:
-  - "some/path"
-queries: true
-`,
-      false,
-    ),
-  );
-});
@@ -1,7 +1,5 @@
 import * as path from "path";
 
-import * as yaml from "js-yaml";
-import * as jsonschema from "jsonschema";
 import * as semver from "semver";
 
 import * as errorMessages from "../error-messages";

@@ -380,7 +378,10 @@ function combineQueries(
   const result: QuerySpec[] = [];
 
   // Query settings obtained from the repository properties have the highest precedence.
-  if (augmentationProperties.repoPropertyQueries?.input) {
+  if (
+    augmentationProperties.repoPropertyQueries &&
+    augmentationProperties.repoPropertyQueries.input
+  ) {
     logger.info(
       `Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
         `${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,

@@ -473,53 +474,3 @@ export function generateCodeScanningConfig(
 
   return augmentedConfig;
 }
-
-/**
- * Attempts to parse `contents` into a `UserConfig` value.
- *
- * @param logger The logger to use.
- * @param pathInput The path to the file where `contents` was obtained from, for use in error messages.
- * @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
- * @param validateConfig Whether to validate the configuration file against the schema.
- * @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
- * @throws A `ConfigurationError` if parsing failed.
- */
-export function parseUserConfig(
-  logger: Logger,
-  pathInput: string,
-  contents: string,
-  validateConfig: boolean,
-): UserConfig {
-  try {
-    const schema =
-      // eslint-disable-next-line @typescript-eslint/no-require-imports
-      require("../../src/db-config-schema.json") as jsonschema.Schema;
-
-    const doc = yaml.load(contents);
-
-    if (validateConfig) {
-      const result = new jsonschema.Validator().validate(doc, schema);
-
-      if (result.errors.length > 0) {
-        for (const error of result.errors) {
-          logger.error(error.stack);
-        }
-        throw new ConfigurationError(
-          errorMessages.getInvalidConfigFileMessage(
-            pathInput,
-            result.errors.map((e) => e.stack),
-          ),
-        );
-      }
-    }
-
-    return doc as UserConfig;
-  } catch (error) {
-    if (error instanceof yaml.YAMLException) {
-      throw new ConfigurationError(
-        errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
-      );
-    }
-    throw error;
-  }
-}
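The removed parseUserConfig combined two steps: load the YAML, then validate the resulting document against a JSON schema with the jsonschema package. A minimal sketch of that combination under stated assumptions (loadValidated is an illustrative name, and error wrapping is simplified relative to the removed code):

// Sketch: schema-validated YAML parsing, as the removed parseUserConfig did.
import * as yaml from "js-yaml";
import { Schema, Validator } from "jsonschema";

function loadValidated(contents: string, schema: Schema): unknown {
  const doc = yaml.load(contents);
  const result = new Validator().validate(doc, schema);
  if (result.errors.length > 0) {
    // Each validation error carries a human-readable .stack description.
    throw new Error(result.errors.map((e) => e.stack).join("\n"));
  }
  return doc;
}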
@@ -5,7 +5,6 @@ import test from "ava";
|
|||||||
import * as sinon from "sinon";
|
import * as sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsUtil from "./actions-util";
|
import * as actionsUtil from "./actions-util";
|
||||||
import { AnalysisKind } from "./analyses";
|
|
||||||
import { GitHubApiDetails } from "./api-client";
|
import { GitHubApiDetails } from "./api-client";
|
||||||
import * as apiClient from "./api-client";
|
import * as apiClient from "./api-client";
|
||||||
import { createStubCodeQL } from "./codeql";
|
import { createStubCodeQL } from "./codeql";
|
||||||
@@ -109,39 +108,6 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
|
|
||||||
await withTmpDir(async (tmpDir) => {
|
|
||||||
setupActionsVars(tmpDir, tmpDir);
|
|
||||||
sinon
|
|
||||||
.stub(actionsUtil, "getRequiredInput")
|
|
||||||
.withArgs("upload-database")
|
|
||||||
.returns("true");
|
|
||||||
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
|
|
||||||
|
|
||||||
await mockHttpRequests(201);
|
|
||||||
|
|
||||||
const loggedMessages = [];
|
|
||||||
await uploadDatabases(
|
|
||||||
testRepoName,
|
|
||||||
getCodeQL(),
|
|
||||||
{
|
|
||||||
...getTestConfig(tmpDir),
|
|
||||||
analysisKinds: [AnalysisKind.CodeQuality],
|
|
||||||
},
|
|
||||||
testApiDetails,
|
|
||||||
getRecordingLogger(loggedMessages),
|
|
||||||
);
|
|
||||||
t.assert(
|
|
||||||
loggedMessages.find(
|
|
||||||
(v: LoggedMessage) =>
|
|
||||||
v.type === "debug" &&
|
|
||||||
v.message ===
|
|
||||||
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
|
|
||||||
) !== undefined,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
test("Abort database upload if running against GHES", async (t) => {
|
test("Abort database upload if running against GHES", async (t) => {
|
||||||
await withTmpDir(async (tmpDir) => {
|
await withTmpDir(async (tmpDir) => {
|
||||||
setupActionsVars(tmpDir, tmpDir);
|
setupActionsVars(tmpDir, tmpDir);
|
||||||
@@ -1,7 +1,6 @@
 import * as fs from "fs";
 
 import * as actionsUtil from "./actions-util";
-import { AnalysisKind } from "./analyses";
 import { getApiClient, GitHubApiDetails } from "./api-client";
 import { type CodeQL } from "./codeql";
 import { Config } from "./config-utils";
@@ -23,13 +22,6 @@ export async function uploadDatabases(
     return;
   }
 
-  if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
-    logger.debug(
-      `Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
-    );
-    return;
-  }
-
   if (util.isInTestMode()) {
     logger.debug("In test mode. Skipping database upload.");
     return;
@@ -1,145 +0,0 @@
-{
-  "$schema": "https://json-schema.org/draft/2020-12/schema",
-  "title": "CodeQL Database Configuration",
-  "description": "Format of the config file supplied by the user for CodeQL analysis",
-  "type": "object",
-  "properties": {
-    "name": {
-      "type": "string",
-      "description": "Name of the configuration"
-    },
-    "disable-default-queries": {
-      "type": "boolean",
-      "description": "Whether to disable default queries"
-    },
-    "queries": {
-      "type": "array",
-      "description": "List of additional queries to run",
-      "items": {
-        "$ref": "#/definitions/QuerySpec"
-      }
-    },
-    "paths-ignore": {
-      "type": "array",
-      "description": "Paths to ignore during analysis",
-      "items": {
-        "type": "string"
-      }
-    },
-    "paths": {
-      "type": "array",
-      "description": "Paths to include in analysis",
-      "items": {
-        "type": "string"
-      }
-    },
-    "packs": {
-      "description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
-      "oneOf": [
-        {
-          "type": "array",
-          "items": {
-            "type": "string"
-          }
-        },
-        {
-          "type": "object",
-          "additionalProperties": {
-            "type": "array",
-            "items": {
-              "type": "string"
-            }
-          }
-        }
-      ]
-    },
-    "query-filters": {
-      "type": "array",
-      "description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
-      "items": {
-        "$ref": "#/definitions/QueryFilter"
-      }
-    }
-  },
-  "additionalProperties": true,
-  "definitions": {
-    "QuerySpec": {
-      "type": "object",
-      "description": "Detailed query specification object",
-      "properties": {
-        "name": {
-          "type": "string",
-          "description": "Optional name for the query"
-        },
-        "uses": {
-          "type": "string",
-          "description": "The query or query suite to use"
-        }
-      },
-      "required": ["uses"],
-      "additionalProperties": false
-    },
-    "QueryFilter": {
-      "description": "Query filter that can either include or exclude queries",
-      "oneOf": [
-        {
-          "$ref": "#/definitions/ExcludeQueryFilter"
-        },
-        {
-          "$ref": "#/definitions/IncludeQueryFilter"
-        },
-        {}
-      ]
-    },
-    "ExcludeQueryFilter": {
-      "type": "object",
-      "description": "Filter to exclude queries",
-      "properties": {
-        "exclude": {
-          "type": "object",
-          "description": "Queries to exclude",
-          "additionalProperties": {
-            "oneOf": [
-              {
-                "type": "array",
-                "items": {
-                  "type": "string"
-                }
-              },
-              {
-                "type": "string"
-              }
-            ]
-          }
-        }
-      },
-      "required": ["exclude"],
-      "additionalProperties": false
-    },
-    "IncludeQueryFilter": {
-      "type": "object",
-      "description": "Filter to include queries",
-      "properties": {
-        "include": {
-          "type": "object",
-          "description": "Queries to include",
-          "additionalProperties": {
-            "oneOf": [
-              {
-                "type": "array",
-                "items": {
-                  "type": "string"
-                }
-              },
-              {
-                "type": "string"
-              }
-            ]
-          }
-        }
-      },
-      "required": ["include"],
-      "additionalProperties": false
-    }
-  }
-}
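To make the deleted schema concrete, here is a hedged sketch of validating two candidate configs against it with the `jsonschema` package. The import path and both object literals are illustrative; only the schema shape comes from the file above:

import * as jsonschema from "jsonschema";
// Illustrative path; the deleted TypeScript loaded "../../src/db-config-schema.json".
import * as schemaJson from "./db-config-schema.json";

const schema = schemaJson as jsonschema.Schema;

const validConfig = {
  name: "My config",
  "disable-default-queries": false,
  queries: [{ uses: "security-and-quality" }],
  packs: { javascript: ["codeql/javascript-queries"] },
};

// Invalid: QuerySpec requires the "uses" key.
const invalidConfig = {
  queries: [{ name: "missing the required 'uses' key" }],
};

const validator = new jsonschema.Validator();
console.log(validator.validate(validConfig, schema).errors.length); // 0
console.log(validator.validate(invalidConfig, schema).errors.length > 0); // true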
@@ -1,6 +1,6 @@
 {
-  "bundleVersion": "codeql-bundle-v2.23.3",
-  "cliVersion": "2.23.3",
-  "priorBundleVersion": "codeql-bundle-v2.23.2",
-  "priorCliVersion": "2.23.2"
+  "bundleVersion": "codeql-bundle-v2.23.2",
+  "cliVersion": "2.23.2",
+  "priorBundleVersion": "codeql-bundle-v2.23.1",
+  "priorCliVersion": "2.23.1"
 }
@@ -4,10 +4,7 @@ import * as sinon from "sinon";
 import * as actionsUtil from "./actions-util";
 import type { PullRequestBranches } from "./actions-util";
 import * as apiClient from "./api-client";
-import {
-  shouldPerformDiffInformedAnalysis,
-  exportedForTesting,
-} from "./diff-informed-analysis-utils";
+import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
 import { Feature, Features } from "./feature-flags";
 import { getRunnerLogger } from "./logging";
 import { parseRepositoryNwo } from "./repository";
@@ -186,201 +183,3 @@ test(
   },
   false,
 );
-
-function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
-  sinon
-    .stub(actionsUtil, "getRequiredInput")
-    .withArgs("checkout_path")
-    .returns("/checkout/path");
-  return exportedForTesting.getDiffRanges(
-    {
-      filename: "test.txt",
-      changes,
-      patch: patch?.join("\n"),
-    },
-    getRunnerLogger(true),
-  );
-}
-
-test("getDiffRanges: file unchanged", async (t) => {
-  const diffRanges = runGetDiffRanges(0, undefined);
-  t.deepEqual(diffRanges, []);
-});
-
-test("getDiffRanges: file diff too large", async (t) => {
-  const diffRanges = runGetDiffRanges(1000000, undefined);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 0,
-      endLine: 0,
-    },
-  ]);
-});
-
-test("getDiffRanges: diff thunk with single addition range", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,6 +50,8 @@",
-    " a",
-    " b",
-    " c",
-    "+1",
-    "+2",
-    " d",
-    " e",
-    " f",
-  ]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 53,
-      endLine: 54,
-    },
-  ]);
-});
-
-test("getDiffRanges: diff thunk with single deletion range", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,8 +50,6 @@",
-    " a",
-    " b",
-    " c",
-    "-1",
-    "-2",
-    " d",
-    " e",
-    " f",
-  ]);
-  t.deepEqual(diffRanges, []);
-});
-
-test("getDiffRanges: diff thunk with single update range", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,7 +50,7 @@",
-    " a",
-    " b",
-    " c",
-    "-1",
-    "+2",
-    " d",
-    " e",
-    " f",
-  ]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 53,
-      endLine: 53,
-    },
-  ]);
-});
-
-test("getDiffRanges: diff thunk with addition ranges", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,7 +50,9 @@",
-    " a",
-    " b",
-    " c",
-    "+1",
-    " c",
-    "+2",
-    " d",
-    " e",
-    " f",
-  ]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 53,
-      endLine: 53,
-    },
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 55,
-      endLine: 55,
-    },
-  ]);
-});
-
-test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,7 +50,7 @@",
-    " a",
-    " b",
-    " c",
-    "-1",
-    " d",
-    "-2",
-    "+3",
-    " e",
-    " f",
-    "+4",
-    "+5",
-    " g",
-    " h",
-    " i",
-  ]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 54,
-      endLine: 54,
-    },
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 57,
-      endLine: 58,
-    },
-  ]);
-});
-
-test("getDiffRanges: multiple diff thunks", async (t) => {
-  const diffRanges = runGetDiffRanges(2, [
-    "@@ -30,6 +50,8 @@",
-    " a",
-    " b",
-    " c",
-    "+1",
-    "+2",
-    " d",
-    " e",
-    " f",
-    "@@ -130,6 +150,8 @@",
-    " a",
-    " b",
-    " c",
-    "+1",
-    "+2",
-    " d",
-    " e",
-    " f",
-  ]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 53,
-      endLine: 54,
-    },
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 153,
-      endLine: 154,
-    },
-  ]);
-});
-
-test("getDiffRanges: no diff context lines", async (t) => {
-  const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
-  t.deepEqual(diffRanges, [
-    {
-      path: "/checkout/path/test.txt",
-      startLine: 50,
-      endLine: 51,
-    },
-  ]);
-});
-
-test("getDiffRanges: malformed thunk header", async (t) => {
-  const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
-  t.deepEqual(diffRanges, undefined);
-});
@@ -3,25 +3,12 @@ import * as path from "path";
 
 import * as actionsUtil from "./actions-util";
 import type { PullRequestBranches } from "./actions-util";
-import { getApiClient, getGitHubVersion } from "./api-client";
+import { getGitHubVersion } from "./api-client";
 import type { CodeQL } from "./codeql";
 import { Feature, FeatureEnablement } from "./feature-flags";
 import { Logger } from "./logging";
-import { getRepositoryNwoFromEnv } from "./repository";
 import { GitHubVariant, satisfiesGHESVersion } from "./util";
 
-/**
- * This interface is an abbreviated version of the file diff object returned by
- * the GitHub API.
- */
-interface FileDiff {
-  filename: string;
-  changes: number;
-  // A patch may be absent if the file is binary, if the file diff is too large,
-  // or if the file is unchanged.
-  patch?: string | undefined;
-}
-
 /**
  * Check if the action should perform diff-informed analysis.
  */
@@ -106,174 +93,3 @@ export function readDiffRangesJsonFile(
   );
   return JSON.parse(jsonContents) as DiffThunkRange[];
 }
-
-/**
- * Return the file line ranges that were added or modified in the pull request.
- *
- * @param branches The base and head branches of the pull request.
- * @param logger
- * @returns An array of tuples, where each tuple contains the absolute path of a
- * file, the start line and the end line (both 1-based and inclusive) of an
- * added or modified range in that file. Returns `undefined` if the action was
- * not triggered by a pull request or if there was an error.
- */
-export async function getPullRequestEditedDiffRanges(
-  branches: PullRequestBranches,
-  logger: Logger,
-): Promise<DiffThunkRange[] | undefined> {
-  const fileDiffs = await getFileDiffsWithBasehead(branches, logger);
-  if (fileDiffs === undefined) {
-    return undefined;
-  }
-  if (fileDiffs.length >= 300) {
-    // The "compare two commits" API returns a maximum of 300 changed files. If
-    // we see that many changed files, it is possible that there could be more,
-    // with the rest being truncated. In this case, we should not attempt to
-    // compute the diff ranges, as the result would be incomplete.
-    logger.warning(
-      `Cannot retrieve the full diff because there are too many ` +
-        `(${fileDiffs.length}) changed files in the pull request.`,
-    );
-    return undefined;
-  }
-  const results: DiffThunkRange[] = [];
-  for (const filediff of fileDiffs) {
-    const diffRanges = getDiffRanges(filediff, logger);
-    if (diffRanges === undefined) {
-      return undefined;
-    }
-    results.push(...diffRanges);
-  }
-  return results;
-}
-
-async function getFileDiffsWithBasehead(
-  branches: PullRequestBranches,
-  logger: Logger,
-): Promise<FileDiff[] | undefined> {
-  // Check CODE_SCANNING_REPOSITORY first. If it is empty or not set, fall back
-  // to GITHUB_REPOSITORY.
-  const repositoryNwo = getRepositoryNwoFromEnv(
-    "CODE_SCANNING_REPOSITORY",
-    "GITHUB_REPOSITORY",
-  );
-  const basehead = `${branches.base}...${branches.head}`;
-  try {
-    const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
-      {
-        owner: repositoryNwo.owner,
-        repo: repositoryNwo.repo,
-        basehead,
-        per_page: 1,
-      },
-    );
-    logger.debug(
-      `Response from compareCommitsWithBasehead(${basehead}):` +
-        `\n${JSON.stringify(response, null, 2)}`,
-    );
-    return response.data.files;
-  } catch (error: any) {
-    if (error.status) {
-      logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
-      logger.debug(
-        `Error running compareCommitsWithBasehead(${basehead}):` +
-          `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
-          `\nError Response: ${JSON.stringify(error.response, null, 2)}`,
-      );
-      return undefined;
-    } else {
-      throw error;
-    }
-  }
-}
-
-function getDiffRanges(
-  fileDiff: FileDiff,
-  logger: Logger,
-): DiffThunkRange[] | undefined {
-  // Diff-informed queries expect the file path to be absolute. CodeQL always
-  // uses forward slashes as the path separator, so on Windows we need to
-  // replace any backslashes with forward slashes.
-  const filename = path
-    .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
-    .replaceAll(path.sep, "/");
-
-  if (fileDiff.patch === undefined) {
-    if (fileDiff.changes === 0) {
-      // There are situations where a changed file legitimately has no diff.
-      // For example, the file may be a binary file, or that the file may have
-      // been renamed with no changes to its contents. In these cases, the
-      // file would be reported as having 0 changes, and we can return an empty
-      // array to indicate no diff range in this file.
-      return [];
-    }
-    // If a file is reported to have nonzero changes but no patch, that may be
-    // due to the file diff being too large. In this case, we should fall back
-    // to a special diff range that covers the entire file.
-    return [
-      {
-        path: filename,
-        startLine: 0,
-        endLine: 0,
-      },
-    ];
-  }
-
-  // The 1-based file line number of the current line
-  let currentLine = 0;
-  // The 1-based file line number that starts the current range of added lines
-  let additionRangeStartLine: number | undefined = undefined;
-  const diffRanges: DiffThunkRange[] = [];
-
-  const diffLines = fileDiff.patch.split("\n");
-  // Adding a fake context line at the end ensures that the following loop will
-  // always terminate the last range of added lines.
-  diffLines.push(" ");
-
-  for (const diffLine of diffLines) {
-    if (diffLine.startsWith("-")) {
-      // Ignore deletions completely -- we do not even want to consider them when
-      // calculating consecutive ranges of added lines.
-      continue;
-    }
-    if (diffLine.startsWith("+")) {
-      if (additionRangeStartLine === undefined) {
-        additionRangeStartLine = currentLine;
-      }
-      currentLine++;
-      continue;
-    }
-    if (additionRangeStartLine !== undefined) {
-      // Any line that does not start with a "+" or "-" terminates the current
-      // range of added lines.
-      diffRanges.push({
-        path: filename,
-        startLine: additionRangeStartLine,
-        endLine: currentLine - 1,
-      });
-      additionRangeStartLine = undefined;
-    }
-    if (diffLine.startsWith("@@ ")) {
-      // A new hunk header line resets the current line number.
-      const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
-      if (match === null) {
-        logger.warning(
-          `Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
-        );
-        return undefined;
-      }
-      currentLine = parseInt(match[1], 10);
-      continue;
-    }
-    if (diffLine.startsWith(" ")) {
-      // An unchanged context line advances the current line number.
-      currentLine++;
-      continue;
-    }
-  }
-  return diffRanges;
-}
-
-export const exportedForTesting = {
-  getDiffRanges,
-};
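The removed parser resets its line counter at each hunk header, so the regex deserves a worked example: for `@@ -30,6 +50,8 @@` it captures `50`, the 1-based start line in the new file. A self-contained sketch of just that step (the helper name is illustrative, not an exported API):

const HUNK_HEADER = /^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/;

// Returns the 1-based line number in the new file where the hunk starts,
// or undefined for a malformed header.
function newFileStartLine(header: string): number | undefined {
  const match = header.match(HUNK_HEADER);
  return match === null ? undefined : parseInt(match[1], 10);
}

console.log(newFileStartLine("@@ -30,6 +50,8 @@")); // 50
console.log(newFileStartLine("@@ -30 +50,2 @@")); // 50 (counts may be omitted)
console.log(newFileStartLine("@@ 30 +50,2 @@")); // undefined (malformed)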
@@ -47,9 +47,6 @@ export enum EnvVar {
   /** Whether the CodeQL Action has already warned the user about low disk space. */
   HAS_WARNED_ABOUT_DISK_SPACE = "CODEQL_ACTION_HAS_WARNED_ABOUT_DISK_SPACE",
 
-  /** Whether the `setup-codeql` action has been run. */
-  SETUP_CODEQL_ACTION_HAS_RUN = "CODEQL_ACTION_SETUP_CODEQL_HAS_RUN",
-
   /** Whether the init action has been run. */
   INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_HAS_RUN",
 
@@ -14,22 +14,6 @@ export function getConfigFileDoesNotExistErrorMessage(
   return `The configuration file "${configFile}" does not exist`;
 }
 
-export function getConfigFileParseErrorMessage(
-  configFile: string,
-  message: string,
-): string {
-  return `Cannot parse "${configFile}": ${message}`;
-}
-
-export function getInvalidConfigFileMessage(
-  configFile: string,
-  messages: string[],
-): string {
-  const andMore =
-    messages.length > 10 ? `, and ${messages.length - 10} more.` : ".";
-  return `The configuration file "${configFile}" is invalid: ${messages.slice(0, 10).join(", ")}${andMore}`;
-}
-
 export function getConfigFileRepoFormatInvalidMessage(
   configFile: string,
 ): string {
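The truncation in the removed `getInvalidConfigFileMessage` is easy to misread: at most 10 messages are joined, and any remainder is summarized. The same logic as a runnable sketch (function name illustrative):

function formatInvalidConfig(configFile: string, messages: string[]): string {
  // Show at most 10 messages; summarize the rest.
  const andMore =
    messages.length > 10 ? `, and ${messages.length - 10} more.` : ".";
  return `The configuration file "${configFile}" is invalid: ${messages
    .slice(0, 10)
    .join(", ")}${andMore}`;
}

// With 12 messages, the output ends with ", and 2 more."
console.log(
  formatInvalidConfig(
    "codeql-config.yml",
    Array.from({ length: 12 }, (_, i) => `error ${i + 1}`),
  ),
);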
@@ -44,7 +44,6 @@ export interface FeatureEnablement {
  */
 export enum Feature {
   AllowToolcacheInput = "allow_toolcache_input",
-  AnalyzeUseNewUpload = "analyze_use_new_upload",
   CleanupTrapCaches = "cleanup_trap_caches",
   CppDependencyInstallation = "cpp_dependency_installation_enabled",
   DiffInformedQueries = "diff_informed_queries",
@@ -78,7 +77,6 @@ export enum Feature {
   QaTelemetryEnabled = "qa_telemetry_enabled",
   ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
   UseRepositoryProperties = "use_repository_properties",
-  ValidateDbConfig = "validate_db_config",
 }
 
 export const featureConfig: Record<
@@ -117,11 +115,6 @@ export const featureConfig: Record<
     envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
     minimumVersion: undefined,
   },
-  [Feature.AnalyzeUseNewUpload]: {
-    defaultValue: false,
-    envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
-    minimumVersion: undefined,
-  },
   [Feature.CleanupTrapCaches]: {
     defaultValue: false,
     envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -294,11 +287,6 @@ export const featureConfig: Record<
     envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
     minimumVersion: "2.23.0",
  },
-  [Feature.ValidateDbConfig]: {
-    defaultValue: false,
-    envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
-    minimumVersion: undefined,
-  },
 };
 
 /**
@@ -653,7 +641,7 @@ class GitHubFeatureFlags {
     }
 
     this.logger.debug(
-      "Loaded the following default values for the feature flags from the CodeQL Action API:",
+      "Loaded the following default values for the feature flags from the Code Scanning API:",
     );
     for (const [feature, value] of Object.entries(remoteFlags).sort(
       ([nameA], [nameB]) => nameA.localeCompare(nameB),
@@ -663,13 +651,12 @@ class GitHubFeatureFlags {
     this.hasAccessedRemoteFeatureFlags = true;
     return remoteFlags;
   } catch (e) {
-    const httpError = util.asHTTPError(e);
-    if (httpError?.status === 403) {
+    if (util.isHTTPError(e) && e.status === 403) {
       this.logger.warning(
-        "This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
+        "This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
          "As a result, it will not be opted into any experimental features. " +
          "This could be because the Action is running on a pull request from a fork. If not, " +
-          `please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
+          `please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`,
      );
      this.hasAccessedRemoteFeatureFlags = false;
      return {};
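Both sides of the `catch` block above hinge on how an unknown thrown value is narrowed to an HTTP error. Neither `util.isHTTPError` nor `util.asHTTPError` is shown in this diff, so the following is only a plausible sketch of the two shapes, clarifying why one side can write `httpError?.status === 403` while the other needs `isHTTPError(e) && e.status === 403`:

interface HTTPError {
  status: number;
  message: string;
}

// Type-guard style: callers must combine the check with e.status themselves.
function isHTTPError(e: unknown): e is HTTPError {
  return (
    typeof e === "object" &&
    e !== null &&
    typeof (e as HTTPError).status === "number"
  );
}

// Converter style: returns the error or undefined, so callers can use
// optional chaining such as httpError?.status === 403.
function asHTTPError(e: unknown): HTTPError | undefined {
  return isHTTPError(e) ? e : undefined;
}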
@@ -2,7 +2,6 @@ import test, { ExecutionContext } from "ava";
 import * as sinon from "sinon";
 
 import * as actionsUtil from "./actions-util";
-import { AnalysisKind } from "./analyses";
 import * as codeql from "./codeql";
 import * as configUtils from "./config-utils";
 import { Feature } from "./feature-flags";
@@ -29,13 +28,12 @@ test("post: init action with debug mode off", async (t) => {
   const gitHubVersion: util.GitHubVersion = {
     type: util.GitHubVariant.DOTCOM,
   };
-  sinon.stub(configUtils, "getConfig").resolves(
-    createTestConfig({
+  sinon.stub(configUtils, "getConfig").resolves({
     debugMode: false,
     gitHubVersion,
     languages: [],
-    }),
-  );
+    packs: [],
+  } as unknown as configUtils.Config);
 
   const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
   const printDebugLogsSpy = sinon.spy();
@@ -297,17 +295,6 @@ test("uploading failed SARIF run fails when workflow does not reference github/c
   t.truthy(result.upload_failed_run_stack_trace);
 });
 
-test("not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", async (t) => {
-  const result = await testFailedSarifUpload(t, createTestWorkflow([]), {
-    analysisKinds: [AnalysisKind.CodeQuality],
-    expectUpload: false,
-  });
-  t.is(
-    result.upload_failed_run_skipped_because,
-    "Code Scanning is not enabled.",
-  );
-});
-
 function createTestWorkflow(
   steps: workflow.WorkflowJobStep[],
 ): workflow.Workflow {
@@ -340,22 +327,20 @@ async function testFailedSarifUpload(
   expectUpload = true,
   exportDiagnosticsEnabled = false,
   matrix = {},
-  analysisKinds = [AnalysisKind.CodeScanning],
 }: {
   category?: string;
   databaseExists?: boolean;
   expectUpload?: boolean;
   exportDiagnosticsEnabled?: boolean;
   matrix?: { [key: string]: string };
-  analysisKinds?: AnalysisKind[];
 } = {},
 ): Promise<initActionPostHelper.UploadFailedSarifResult> {
-  const config = createTestConfig({
-    analysisKinds,
+  const config = {
     codeQLCmd: "codeql",
     debugMode: true,
     languages: [],
-  });
+    packs: [],
+  } as unknown as configUtils.Config;
   if (databaseExists) {
     config.dbLocation = "path/to/database";
   }
@@ -7,7 +7,7 @@ import * as actionsUtil from "./actions-util";
 import { CodeScanning } from "./analyses";
 import { getApiClient } from "./api-client";
 import { CodeQL, getCodeQL } from "./codeql";
-import { Config, isCodeScanningEnabled } from "./config-utils";
+import { Config } from "./config-utils";
 import * as dependencyCaching from "./dependency-caching";
 import { EnvVar } from "./environment";
 import { Feature, FeatureEnablement } from "./feature-flags";
@@ -139,15 +139,6 @@ export async function tryUploadSarifIfRunFailed(
     EnvVar.JOB_STATUS,
     process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus,
   );
-
-  // If the only enabled analysis kind is `code-quality`, then we shouldn't
-  // upload the failed SARIF to Code Scanning.
-  if (!isCodeScanningEnabled(config)) {
-    return {
-      upload_failed_run_skipped_because: "Code Scanning is not enabled.",
-    };
-  }
-
   try {
     return await maybeUploadFailedSarif(
       config,
@@ -15,7 +15,6 @@ import {
   getTemporaryDirectory,
   persistInputs,
 } from "./actions-util";
-import { AnalysisKind, getAnalysisKinds } from "./analyses";
 import { getGitHubVersion } from "./api-client";
 import {
   getDependencyCachingEnabled,
@@ -57,7 +56,6 @@ import { ToolsSource } from "./setup-codeql";
 import {
   ActionName,
   InitStatusReport,
-  InitToolsDownloadFields,
   InitWithConfigStatusReport,
   createInitWithConfigStatusReport,
   createStatusReportBase,
@@ -88,29 +86,14 @@ import {
 } from "./util";
 import { validateWorkflow } from "./workflow";
 
-/**
- * Sends a status report indicating that the `init` Action is starting.
- *
- * @param startedAt
- * @param config
- * @param logger
- */
-async function sendStartingStatusReport(
-  startedAt: Date,
-  config: Partial<configUtils.Config> | undefined,
-  logger: Logger,
-) {
-  const statusReportBase = await createStatusReportBase(
-    ActionName.Init,
-    "starting",
-    startedAt,
-    config,
-    await checkDiskUsage(logger),
-    logger,
-  );
-  if (statusReportBase !== undefined) {
-    await sendStatusReport(statusReportBase);
-  }
+/** Fields of the init status report populated when the tools source is `download`. */
+interface InitToolsDownloadFields {
+  /** Time taken to download the bundle, in milliseconds. */
+  tools_download_duration_ms?: number;
+  /**
+   * Whether the relevant tools dotcom feature flags have been misconfigured.
+   * Only populated if we attempt to determine the default version based on the dotcom feature flags. */
+  tools_feature_flags_valid?: boolean;
 }
 
 async function sendCompletedStatusReport(
@@ -227,7 +210,6 @@ async function run() {
     ? await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo)
     : {};
 
-  // Create a unique identifier for this run.
   const jobRunUuid = uuidV4();
   logger.info(`Job run UUID is ${jobRunUuid}.`);
   core.exportVariable(EnvVar.JOB_RUN_UUID, jobRunUuid);
@@ -245,30 +227,17 @@ async function run() {
   );
 
   try {
-    // Parsing the `analysis-kinds` input may throw a `ConfigurationError`, which we don't want before
-    // we have called `sendStartingStatusReport` below. However, we want the analysis kinds for that status
-    // report. To work around this, we ignore exceptions that are thrown here and then call `getAnalysisKinds`
-    // a second time later. The second call will then throw the exception again. If `getAnalysisKinds` is
-    // successful, the results are cached so that we don't duplicate the work in normal runs.
-    let analysisKinds: AnalysisKind[] | undefined;
-    try {
-      analysisKinds = await getAnalysisKinds(logger);
-    } catch (err) {
-      logger.debug(
-        `Failed to parse analysis kinds for 'starting' status report: ${getErrorMessage(err)}`,
+    const statusReportBase = await createStatusReportBase(
+      ActionName.Init,
+      "starting",
+      startedAt,
+      config,
+      await checkDiskUsage(logger),
+      logger,
     );
+    if (statusReportBase !== undefined) {
+      await sendStatusReport(statusReportBase);
     }
-
-    // Send a status report indicating that an analysis is starting.
-    await sendStartingStatusReport(startedAt, { analysisKinds }, logger);
-
-    // Throw a `ConfigurationError` if the `setup-codeql` action has been run.
-    if (process.env[EnvVar.SETUP_CODEQL_ACTION_HAS_RUN] === "true") {
-      throw new ConfigurationError(
-        `The 'init' action should not be run in the same workflow as 'setup-codeql'.`,
-      );
-    }
-
     const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
       gitHubVersion.type,
     );
@@ -324,11 +293,21 @@ async function run() {
     }
   }
 
-  analysisKinds = await getAnalysisKinds(logger);
-  config = await initConfig(features, {
-    analysisKinds,
+  // Warn that `quality-queries` is deprecated if there is an argument for it.
+  const qualityQueriesInput = getOptionalInput("quality-queries");
+  if (qualityQueriesInput !== undefined) {
+    logger.warning(
+      "The `quality-queries` input is deprecated and will be removed in a future version of the CodeQL Action. " +
+        "Use the `analysis-kinds` input to configure different analysis kinds instead.",
+    );
+  }
+
+  config = await initConfig({
+    analysisKindsInput: getRequiredInput("analysis-kinds"),
     languagesInput: getOptionalInput("languages"),
     queriesInput: getOptionalInput("queries"),
+    qualityQueriesInput,
    packsInput: getOptionalInput("packs"),
    buildModeInput: getOptionalInput("build-mode"),
    configFile,
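The deleted comment block above describes a call-twice pattern that only works because `getAnalysisKinds` caches successful results. The real implementation lives in `./analyses` and is not part of this diff; a generic sketch of that kind of memoization, with all names illustrative:

let cachedKinds: string[] | undefined;

async function getAnalysisKindsCached(
  parse: () => Promise<string[]>,
): Promise<string[]> {
  // Only successful results are cached, so a failed first call (whose error
  // is swallowed for the "starting" status report) is retried later, and the
  // retry rethrows the same ConfigurationError.
  if (cachedKinds === undefined) {
    cachedKinds = await parse();
  }
  return cachedKinds;
}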
@@ -61,11 +61,10 @@ export async function initCodeQL(
 }
 
 export async function initConfig(
-  features: FeatureEnablement,
   inputs: configUtils.InitConfigInputs,
 ): Promise<configUtils.Config> {
   return await withGroupAsync("Load language configuration", async () => {
-    return await configUtils.initConfig(features, inputs);
+    return await configUtils.initConfig(inputs);
   });
 }
 
@@ -11,8 +11,6 @@ import * as gitUtils from "./git-utils";
 import { getRunnerLogger } from "./logging";
 import {
   downloadOverlayBaseDatabaseFromCache,
-  getCacheRestoreKeyPrefix,
-  getCacheSaveKey,
   OverlayDatabaseMode,
   writeBaseDatabaseOidsFile,
   writeOverlayChangesFile,
@@ -263,48 +261,3 @@ test(
   },
   false,
 );
-
-test("overlay-base database cache keys remain stable", async (t) => {
-  const logger = getRunnerLogger(true);
-  const config = createTestConfig({ languages: ["python", "javascript"] });
-  const codeQlVersion = "2.23.0";
-  const commitOid = "abc123def456";
-
-  sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
-  sinon.stub(gitUtils, "getCommitOid").resolves(commitOid);
-  sinon.stub(actionsUtil, "getWorkflowRunID").returns(12345);
-  sinon.stub(actionsUtil, "getWorkflowRunAttempt").returns(1);
-
-  const saveKey = await getCacheSaveKey(
-    config,
-    codeQlVersion,
-    "checkout-path",
-    logger,
-  );
-  const expectedSaveKey =
-    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1";
-  t.is(
-    saveKey,
-    expectedSaveKey,
-    "Cache save key changed unexpectedly. " +
-      "This may indicate breaking changes in the cache key generation logic.",
-  );
-
-  const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
-    config,
-    codeQlVersion,
-  );
-  const expectedRestoreKeyPrefix =
-    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
-  t.is(
-    restoreKeyPrefix,
-    expectedRestoreKeyPrefix,
-    "Cache restore key prefix changed unexpectedly. " +
-      "This may indicate breaking changes in the cache key generation logic.",
-  );
-
-  t.true(
-    saveKey.startsWith(restoreKeyPrefix),
-    `Expected save key "${saveKey}" to start with restore key prefix "${restoreKeyPrefix}"`,
-  );
-});
@@ -4,19 +4,13 @@ import * as path from "path";
 
 import * as actionsCache from "@actions/cache";
 
-import {
-  getRequiredInput,
-  getTemporaryDirectory,
-  getWorkflowRunAttempt,
-  getWorkflowRunID,
-} from "./actions-util";
+import { getRequiredInput, getTemporaryDirectory } from "./actions-util";
 import { getAutomationID } from "./api-client";
 import { type CodeQL } from "./codeql";
 import { type Config } from "./config-utils";
 import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
 import { Logger, withGroupAsync } from "./logging";
 import {
-  getErrorMessage,
   isInTestMode,
   tryGetFolderBytes,
   waitForResultWithTimeLimit,
@@ -40,10 +34,15 @@ export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
  * Actions Cache client library. Instead we place a limit on the uncompressed
  * size of the overlay-base database.
  *
- * Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
- * translate to a limit of around 3 GB after compression.
+ * Assuming 2.5:1 compression ratio, the 15 GB limit on uncompressed data would
+ * translate to a limit of around 6 GB after compression. This is a high limit
+ * compared to the default 10GB Actions Cache capacity, but enforcement of Actions
+ * Cache quotas is not immediate.
+ *
+ * TODO: revisit this limit before removing the restriction for overlay analysis
+ * to the `github` and `dsp-testing` orgs.
  */
-const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
+const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15000;
 const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
   OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;
 
@@ -272,7 +271,6 @@ export async function uploadOverlayBaseDatabaseToCache(
     config,
     codeQlVersion,
     checkoutPath,
-    logger,
   );
   logger.info(
     `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
@@ -450,28 +448,17 @@ export async function downloadOverlayBaseDatabaseFromCache(
 * The key consists of the restore key prefix (which does not include the
 * commit SHA) and the commit SHA of the current checkout.
 */
-export async function getCacheSaveKey(
+async function getCacheSaveKey(
   config: Config,
   codeQlVersion: string,
   checkoutPath: string,
-  logger: Logger,
 ): Promise<string> {
-  let runId = 1;
-  let attemptId = 1;
-  try {
-    runId = getWorkflowRunID();
-    attemptId = getWorkflowRunAttempt();
-  } catch (e) {
-    logger.warning(
-      `Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`,
-    );
-  }
   const sha = await getCommitOid(checkoutPath);
   const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
     config,
     codeQlVersion,
   );
-  return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
+  return `${restoreKeyPrefix}${sha}`;
 }
 
 /**
@@ -488,7 +475,7 @@ export async function getCacheSaveKey(
 * not include the commit SHA. This allows us to restore the most recent
 * compatible overlay-base database.
 */
-export async function getCacheRestoreKeyPrefix(
+async function getCacheRestoreKeyPrefix(
   config: Config,
   codeQlVersion: string,
 ): Promise<string> {
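For concreteness, the save key that the removed stability test pinned down decomposes as below. The component values are taken verbatim from that test; the variable names are illustrative:

const restoreKeyPrefix =
  "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
const sha = "abc123def456";
const runId = 12345;
const attemptId = 1;

// Newer side of this hunk: key is unique per workflow run and attempt.
const newerSaveKey = `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
// Older side: key is unique per commit only.
const olderSaveKey = `${restoreKeyPrefix}${sha}`;

console.log(newerSaveKey);
// codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1
console.log(olderSaveKey);
// codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456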
@@ -1,196 +0,0 @@
-import * as core from "@actions/core";
-import { v4 as uuidV4 } from "uuid";
-
-import {
-  getActionVersion,
-  getOptionalInput,
-  getRequiredInput,
-  getTemporaryDirectory,
-} from "./actions-util";
-import { getGitHubVersion } from "./api-client";
-import { CodeQL } from "./codeql";
-import { EnvVar } from "./environment";
-import { Features } from "./feature-flags";
-import { initCodeQL } from "./init";
-import { getActionsLogger, Logger } from "./logging";
-import { getRepositoryNwo } from "./repository";
-import { ToolsSource } from "./setup-codeql";
-import {
-  ActionName,
-  InitStatusReport,
-  InitToolsDownloadFields,
-  createStatusReportBase,
-  getActionsStatus,
-  sendStatusReport,
-} from "./status-report";
-import { ToolsDownloadStatusReport } from "./tools-download";
-import {
-  checkDiskUsage,
-  checkForTimeout,
-  checkGitHubVersionInRange,
-  getRequiredEnvParam,
-  initializeEnvironment,
-  ConfigurationError,
-  wrapError,
-  checkActionVersion,
-  getErrorMessage,
-} from "./util";
-
-/**
- * Helper function to send a full status report for this action.
- */
-async function sendCompletedStatusReport(
-  startedAt: Date,
-  toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined,
-  toolsFeatureFlagsValid: boolean | undefined,
-  toolsSource: ToolsSource,
-  toolsVersion: string,
-  logger: Logger,
-  error?: Error,
-): Promise<void> {
-  const statusReportBase = await createStatusReportBase(
-    ActionName.SetupCodeQL,
-    getActionsStatus(error),
-    startedAt,
-    undefined,
-    await checkDiskUsage(logger),
-    logger,
-    error?.message,
-    error?.stack,
-  );
-
-  if (statusReportBase === undefined) {
-    return;
-  }
-
-  const initStatusReport: InitStatusReport = {
-    ...statusReportBase,
-    tools_input: getOptionalInput("tools") || "",
-    tools_resolved_version: toolsVersion,
-    tools_source: toolsSource || ToolsSource.Unknown,
-    workflow_languages: "",
-  };
-
-  const initToolsDownloadFields: InitToolsDownloadFields = {};
-
-  if (toolsDownloadStatusReport?.downloadDurationMs !== undefined) {
-    initToolsDownloadFields.tools_download_duration_ms =
-      toolsDownloadStatusReport.downloadDurationMs;
-  }
-  if (toolsFeatureFlagsValid !== undefined) {
-    initToolsDownloadFields.tools_feature_flags_valid = toolsFeatureFlagsValid;
-  }
-
-  await sendStatusReport({ ...initStatusReport, ...initToolsDownloadFields });
-}
-
-/** The main behaviour of this action. */
-async function run(): Promise<void> {
-  const startedAt = new Date();
-  const logger = getActionsLogger();
-  initializeEnvironment(getActionVersion());
-
-  let codeql: CodeQL;
-  let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined;
-  let toolsFeatureFlagsValid: boolean | undefined;
-  let toolsSource: ToolsSource;
-  let toolsVersion: string;
-
-  const apiDetails = {
-    auth: getRequiredInput("token"),
-    externalRepoAuth: getOptionalInput("external-repository-token"),
-    url: getRequiredEnvParam("GITHUB_SERVER_URL"),
-    apiURL: getRequiredEnvParam("GITHUB_API_URL"),
-  };
-
-  const gitHubVersion = await getGitHubVersion();
-  checkGitHubVersionInRange(gitHubVersion, logger);
-  checkActionVersion(getActionVersion(), gitHubVersion);
-
-  const repositoryNwo = getRepositoryNwo();
-
-  const features = new Features(
-    gitHubVersion,
-    repositoryNwo,
-    getTemporaryDirectory(),
-    logger,
-  );
-
-  const jobRunUuid = uuidV4();
-  logger.info(`Job run UUID is ${jobRunUuid}.`);
-  core.exportVariable(EnvVar.JOB_RUN_UUID, jobRunUuid);
-
-  try {
-    const statusReportBase = await createStatusReportBase(
-      ActionName.SetupCodeQL,
-      "starting",
-      startedAt,
-      undefined,
-      await checkDiskUsage(logger),
-      logger,
-    );
-    if (statusReportBase !== undefined) {
-      await sendStatusReport(statusReportBase);
-    }
-    const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
-      gitHubVersion.type,
-    );
-    toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
-    const initCodeQLResult = await initCodeQL(
-      getOptionalInput("tools"),
-      apiDetails,
-      getTemporaryDirectory(),
-      gitHubVersion.type,
-      codeQLDefaultVersionInfo,
-      features,
-      logger,
-    );
-    codeql = initCodeQLResult.codeql;
-    toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport;
-    toolsVersion = initCodeQLResult.toolsVersion;
-    toolsSource = initCodeQLResult.toolsSource;
-
-    core.setOutput("codeql-path", codeql.getPath());
-    core.setOutput("codeql-version", (await codeql.getVersion()).version);
-
-    core.exportVariable(EnvVar.SETUP_CODEQL_ACTION_HAS_RUN, "true");
-  } catch (unwrappedError) {
-    const error = wrapError(unwrappedError);
-    core.setFailed(error.message);
-    const statusReportBase = await createStatusReportBase(
-      ActionName.SetupCodeQL,
-      error instanceof ConfigurationError ? "user-error" : "failure",
-      startedAt,
-      undefined,
-      await checkDiskUsage(logger),
-      logger,
-      error.message,
-      error.stack,
-    );
-    if (statusReportBase !== undefined) {
-      await sendStatusReport(statusReportBase);
-    }
-    return;
-  }
-
-  await sendCompletedStatusReport(
-    startedAt,
-    toolsDownloadStatusReport,
-    toolsFeatureFlagsValid,
-    toolsSource,
-    toolsVersion,
-    logger,
-  );
-}
-
-/** Run the action and catch any unhandled errors. */
-async function runWrapper(): Promise<void> {
-  try {
-    await run();
-  } catch (error) {
-    core.setFailed(`setup-codeql action failed: ${getErrorMessage(error)}`);
-  }
-  await checkForTimeout();
-}
-
-void runWrapper();
@@ -168,7 +168,7 @@ export function tryGetTagNameFromUrl(
   // assumes less about the structure of the URL.
   const match = matches[matches.length - 1];
 
-  if (match?.length !== 2) {
+  if (match === null || match.length !== 2) {
     logger.debug(
       `Could not determine tag name for URL ${url}. Matched ${JSON.stringify(
         match,
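The one-line change above is behavior-preserving: with a possibly-null match, `match?.length` evaluates to `undefined`, and `undefined !== 2` is true, which is exactly what the spelled-out `match === null || match.length !== 2` checks. A quick sketch (function name illustrative):

function needsFallback(match: RegExpMatchArray | null): boolean {
  // Equivalent to: match === null || match.length !== 2
  return match?.length !== 2;
}

console.log(needsFallback(null)); // true: undefined !== 2
console.log(needsFallback("v1.2".match(/^v(\d+)\.\d+$/))); // false: ["v1.2", "1"]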
@@ -30,7 +30,7 @@ async function runWrapper() {
     logger,
   );
 
-  if (config?.debugMode || core.isDebug()) {
+  if ((config && config.debugMode) || core.isDebug()) {
     const logFilePath = core.getState("proxy-log-file");
     logger.info(
       "Debug mode is on. Uploading proxy log as Actions debugging artifact...",
|
|||||||
@@ -23,6 +23,7 @@ import { getRepositoryNwo } from "./repository";
|
|||||||
import { ToolsSource } from "./setup-codeql";
|
import { ToolsSource } from "./setup-codeql";
|
||||||
import {
|
import {
|
||||||
ConfigurationError,
|
ConfigurationError,
|
||||||
|
isHTTPError,
|
||||||
getRequiredEnvParam,
|
getRequiredEnvParam,
|
||||||
getCachedCodeQlVersion,
|
getCachedCodeQlVersion,
|
||||||
isInTestMode,
|
isInTestMode,
|
||||||
@@ -32,7 +33,6 @@ import {
   BuildMode,
   getErrorMessage,
   getTestingEnvironment,
-  asHTTPError,
 } from "./util";

 export enum ActionName {
@@ -41,7 +41,6 @@ export enum ActionName {
   Init = "init",
   InitPost = "init-post",
   ResolveEnvironment = "resolve-environment",
-  SetupCodeQL = "setup-codeql",
   StartProxy = "start-proxy",
   UploadSarif = "upload-sarif",
 }
@@ -387,9 +386,9 @@ export async function createStatusReportBase(
 }

 const OUT_OF_DATE_MSG =
-  "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`.";
+  "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
 const INCOMPATIBLE_MSG =
-  "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`.";
+  "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";

 /**
  * Send a status report to the code_scanning/analysis/status endpoint.
@@ -429,9 +428,8 @@ export async function sendStatusReport<S extends StatusReportBase>(
       },
     );
   } catch (e) {
-    const httpError = asHTTPError(e);
-    if (httpError !== undefined) {
-      switch (httpError.status) {
+    if (isHTTPError(e)) {
+      switch (e.status) {
         case 403:
           if (
             getWorkflowEventName() === "push" &&
@@ -439,20 +437,16 @@ export async function sendStatusReport<S extends StatusReportBase>(
           ) {
             core.warning(
               'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
-                "Uploading CodeQL results requires write access. " +
-                'To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
+                "Uploading Code Scanning results requires write access. " +
+                'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
                 `See ${DocUrl.SCANNING_ON_PUSH} for more information on how to configure these events.`,
             );
           } else {
-            core.warning(
-              "This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
-                "This could be because the Action is running on a pull request from a fork. If not, " +
-                `please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
-            );
+            core.warning(e.message);
           }
           return;
         case 404:
-          core.warning(httpError.message);
+          core.warning(e.message);
           return;
         case 422:
           // schema incompatibility when reporting status
@@ -470,7 +464,7 @@ export async function sendStatusReport<S extends StatusReportBase>(
       // something else has gone wrong and the request/response will be logged by octokit
       // it's possible this is a transient error and we should continue scanning
       core.warning(
-        `An unexpected error occurred when sending a status report: ${getErrorMessage(
+        `An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
          e,
        )}`,
      );
@@ -522,16 +516,6 @@ export interface InitWithConfigStatusReport extends InitStatusReport {
   config_file: string;
 }

-/** Fields of the init status report populated when the tools source is `download`. */
-export interface InitToolsDownloadFields {
-  /** Time taken to download the bundle, in milliseconds. */
-  tools_download_duration_ms?: number;
-  /**
-   * Whether the relevant tools dotcom feature flags have been misconfigured.
-   * Only populated if we attempt to determine the default version based on the dotcom feature flags. */
-  tools_feature_flags_valid?: boolean;
-}
-
 /**
  * Composes a `InitWithConfigStatusReport` from the given values.
  *
@@ -35,14 +35,14 @@ async function getTarVersion(): Promise<TarVersion> {
   // Return whether this is GNU tar or BSD tar, and the version number
   if (stdout.includes("GNU tar")) {
     const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
-    if (!match?.[1]) {
+    if (!match || !match[1]) {
       throw new Error("Failed to parse output of tar --version.");
     }

     return { type: "gnu", version: match[1] };
   } else if (stdout.includes("bsdtar")) {
     const match = stdout.match(/bsdtar ([0-9.]+)/);
-    if (!match?.[1]) {
+    if (!match || !match[1]) {
       throw new Error("Failed to parse output of tar --version.");
     }

@@ -2,7 +2,7 @@ import { TextDecoder } from "node:util";
 import path from "path";

 import * as github from "@actions/github";
-import { ExecutionContext, TestFn } from "ava";
+import { TestFn } from "ava";
 import nock from "nock";
 import * as sinon from "sinon";

@@ -180,23 +180,6 @@ export function getRecordingLogger(messages: LoggedMessage[]): Logger {
   };
 }

-export function checkExpectedLogMessages(
-  t: ExecutionContext<any>,
-  messages: LoggedMessage[],
-  expectedMessages: string[],
-) {
-  for (const expectedMessage of expectedMessages) {
-    t.assert(
-      messages.some(
-        (msg) =>
-          typeof msg.message === "string" &&
-          msg.message.includes(expectedMessage),
-      ),
-      `Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
-    );
-  }
-}
-
 /** Mock the HTTP request to the feature flags enablement API endpoint. */
 export function mockFeatureFlagApiEndpoint(
   responseStatusCode: number,
@@ -3,11 +3,13 @@ import * as semver from "semver";
 import type { VersionInfo } from "./codeql";

 export enum ToolsFeature {
+  AnalysisSummaryV2IsDefault = "analysisSummaryV2Default",
   BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
   DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
   ForceOverwrite = "forceOverwrite",
   IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",
   PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib",
+  SarifMergeRunsFromEqualCategory = "sarifMergeRunsFromEqualCategory",
 }

 /**
@@ -13,8 +13,8 @@ import * as gitUtils from "./git-utils";
 import { Language } from "./languages";
 import { Logger } from "./logging";
 import {
-  asHTTPError,
   getErrorMessage,
+  isHTTPError,
   tryGetFolderBytes,
   waitForResultWithTimeLimit,
 } from "./util";
@@ -236,7 +236,7 @@ export async function cleanupTrapCaches(
     }
     return { trap_cache_cleanup_size_bytes: totalBytesCleanedUp };
   } catch (e) {
-    if (asHTTPError(e)?.status === 403) {
+    if (isHTTPError(e) && e.status === 403) {
       logger.warning(
         "Could not cleanup TRAP caches as the token did not have the required permissions. " +
           'To clean up TRAP caches, ensure the token has the "actions:write" permission. ' +
@@ -21,6 +21,7 @@ import * as gitUtils from "./git-utils";
 import { initCodeQL } from "./init";
 import { Logger } from "./logging";
 import { getRepositoryNwo, RepositoryNwo } from "./repository";
+import { ToolsFeature } from "./tools-features";
 import * as util from "./util";
 import {
   ConfigurationError,
@@ -268,6 +269,32 @@ async function combineSarifFilesUsingCLI(
     codeQL = initCodeQLResult.codeql;
   }

+  if (
+    !(await codeQL.supportsFeature(
+      ToolsFeature.SarifMergeRunsFromEqualCategory,
+    ))
+  ) {
+    await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
+
+    logger.warning(
+      "The CodeQL CLI does not support merging SARIF files. Merging files in the action.",
+    );
+
+    if (
+      await shouldShowCombineSarifFilesDeprecationWarning(
+        sarifObjects,
+        gitHubVersion,
+      )
+    ) {
+      logger.warning(
+        `Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`,
+      );
+      core.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
+    }
+
+    return combineSarifFiles(sarifFiles, logger);
+  }
+
   const baseTempDir = path.resolve(tempDir, "combined-sarif");
   fs.mkdirSync(baseTempDir, { recursive: true });
   const outputDirectory = fs.mkdtempSync(path.resolve(baseTempDir, "output-"));
@@ -359,17 +386,16 @@ export async function uploadPayload(
     logger.info("Successfully uploaded results");
     return response.data.id as string;
   } catch (e) {
-    const httpError = util.asHTTPError(e);
-    if (httpError !== undefined) {
-      switch (httpError.status) {
+    if (util.isHTTPError(e)) {
+      switch (e.status) {
         case 403:
-          core.warning(httpError.message || GENERIC_403_MSG);
+          core.warning(e.message || GENERIC_403_MSG);
           break;
         case 404:
-          core.warning(httpError.message || GENERIC_404_MSG);
+          core.warning(e.message || GENERIC_404_MSG);
           break;
         default:
-          core.warning(httpError.message);
+          core.warning(e.message);
           break;
       }
     }
@@ -661,39 +687,51 @@ export function buildPayload(
   return payloadObj;
 }

-export interface PostProcessingResults {
-  sarif: util.SarifFile;
-  analysisKey: string;
-  environment: string;
+/**
+ * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
+ * to.
+ */
+export async function uploadFiles(
+  inputSarifPath: string,
+  checkoutPath: string,
+  category: string | undefined,
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+): Promise<UploadResult> {
+  const sarifPaths = getSarifFilePaths(
+    inputSarifPath,
+    uploadTarget.sarifPredicate,
+  );
+
+  return uploadSpecifiedFiles(
+    sarifPaths,
+    checkoutPath,
+    category,
+    features,
+    logger,
+    uploadTarget,
+  );
 }

 /**
- * Performs post-processing of the SARIF files given by `sarifPaths`.
- *
- * @param logger The logger to use.
- * @param features Information about enabled features.
- * @param checkoutPath The path where the repo was checked out at.
- * @param sarifPaths The paths of the SARIF files to post-process.
- * @param category The analysis category.
- * @param analysis The analysis configuration.
- *
- * @returns Returns the results of post-processing the SARIF files,
- * including the resulting SARIF file.
+ * Uploads the given array of SARIF files.
  */
-export async function postProcessSarifFiles(
-  logger: Logger,
-  features: FeatureEnablement,
-  checkoutPath: string,
+export async function uploadSpecifiedFiles(
   sarifPaths: string[],
+  checkoutPath: string,
   category: string | undefined,
-  analysis: analyses.AnalysisConfig,
-): Promise<PostProcessingResults> {
-  logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
+  features: FeatureEnablement,
+  logger: Logger,
+  uploadTarget: analyses.AnalysisConfig,
+): Promise<UploadResult> {
+  logger.startGroup(`Uploading ${uploadTarget.name} results`);
+  logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);

   const gitHubVersion = await getGitHubVersion();

   let sarif: SarifFile;
-  category = analysis.fixCategory(logger, category);
+  category = uploadTarget.fixCategory(logger, category);

   if (sarifPaths.length > 1) {
     // Validate that the files we were asked to upload are all valid SARIF files
@@ -729,113 +767,6 @@ export async function postProcessSarifFiles(
     environment,
   );

-  return { sarif, analysisKey, environment };
-}
-
-/**
- * Writes the post-processed SARIF file to disk, if needed based on `pathInput` or the `SARIF_DUMP_DIR`.
- *
- * @param logger The logger to use.
- * @param pathInput The input provided for `post-processed-sarif-path`.
- * @param uploadTarget The upload target.
- * @param processingResults The results of post-processing SARIF files.
- */
-export async function writePostProcessedFiles(
-  logger: Logger,
-  pathInput: string | undefined,
-  uploadTarget: analyses.AnalysisConfig,
-  postProcessingResults: PostProcessingResults,
-) {
-  // If there's an explicit input, use that. Otherwise, use the value from the environment variable.
-  const outputPath = pathInput || util.getOptionalEnvVar(EnvVar.SARIF_DUMP_DIR);
-
-  // If we have a non-empty output path, write the SARIF file to it.
-  if (outputPath !== undefined) {
-    dumpSarifFile(
-      JSON.stringify(postProcessingResults.sarif),
-      outputPath,
-      logger,
-      uploadTarget,
-    );
-  } else {
-    logger.debug(`Not writing post-processed SARIF files.`);
-  }
-}
-
-/**
- * Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
- * to.
- */
-export async function uploadFiles(
-  inputSarifPath: string,
-  checkoutPath: string,
-  category: string | undefined,
-  features: FeatureEnablement,
-  logger: Logger,
-  uploadTarget: analyses.AnalysisConfig,
-): Promise<UploadResult> {
-  const sarifPaths = getSarifFilePaths(
-    inputSarifPath,
-    uploadTarget.sarifPredicate,
-  );
-
-  return uploadSpecifiedFiles(
-    sarifPaths,
-    checkoutPath,
-    category,
-    features,
-    logger,
-    uploadTarget,
-  );
-}
-
-/**
- * Uploads the given array of SARIF files.
- */
-async function uploadSpecifiedFiles(
-  sarifPaths: string[],
-  checkoutPath: string,
-  category: string | undefined,
-  features: FeatureEnablement,
-  logger: Logger,
-  uploadTarget: analyses.AnalysisConfig,
-): Promise<UploadResult> {
-  const processingResults: PostProcessingResults = await postProcessSarifFiles(
-    logger,
-    features,
-    checkoutPath,
-    sarifPaths,
-    category,
-    uploadTarget,
-  );
-
-  return uploadPostProcessedFiles(
-    logger,
-    checkoutPath,
-    uploadTarget,
-    processingResults,
-  );
-}
-
-/**
- * Uploads the results of post-processing SARIF files to the specified upload target.
- *
- * @param logger The logger to use.
- * @param checkoutPath The path at which the repository was checked out.
- * @param uploadTarget The analysis configuration.
- * @param postProcessingResults The results of post-processing SARIF files.
- *
- * @returns The results of uploading the `postProcessingResults` to `uploadTarget`.
- */
-export async function uploadPostProcessedFiles(
-  logger: Logger,
-  checkoutPath: string,
-  uploadTarget: analyses.AnalysisConfig,
-  postProcessingResults: PostProcessingResults,
-): Promise<UploadResult> {
-  logger.startGroup(`Uploading ${uploadTarget.name} results`);
-
-  const sarif = postProcessingResults.sarif;
   const toolNames = util.getToolNames(sarif);

   logger.debug(`Validating that each SARIF run has a unique category`);
@@ -843,6 +774,11 @@ export async function uploadPostProcessedFiles(
   logger.debug(`Serializing SARIF for upload`);
   const sarifPayload = JSON.stringify(sarif);

+  const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
+  if (dumpDir) {
+    dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
+  }
+
   logger.debug(`Compressing serialized SARIF`);
   const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
   const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -850,13 +786,13 @@
   const payload = buildPayload(
     await gitUtils.getCommitOid(checkoutPath),
     await gitUtils.getRef(),
-    postProcessingResults.analysisKey,
+    analysisKey,
     util.getRequiredEnvParam("GITHUB_WORKFLOW"),
     zippedSarif,
     actionsUtil.getWorkflowRunID(),
     actionsUtil.getWorkflowRunAttempt(),
     checkoutURI,
-    postProcessingResults.environment,
+    environment,
     toolNames,
     await gitUtils.determineBaseBranchHeadCommitOid(),
   );
@@ -902,14 +838,14 @@ function dumpSarifFile(
     fs.mkdirSync(outputDir, { recursive: true });
   } else if (!fs.lstatSync(outputDir).isDirectory()) {
     throw new ConfigurationError(
-      `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`,
+      `The path specified by the ${EnvVar.SARIF_DUMP_DIR} environment variable exists and is not a directory: ${outputDir}`,
     );
   }
   const outputFile = path.resolve(
     outputDir,
     `upload${uploadTarget.sarifExtension}`,
   );
-  logger.info(`Writing processed SARIF file to ${outputFile}`);
+  logger.info(`Dumping processed SARIF file to ${outputFile}`);
   fs.writeFileSync(outputFile, sarifPayload);
 }

@@ -16,7 +16,7 @@ import {
   isThirdPartyAnalysis,
 } from "./status-report";
 import * as upload_lib from "./upload-lib";
-import { postProcessAndUploadSarif } from "./upload-sarif";
+import { uploadSarif } from "./upload-sarif";
 import {
   ConfigurationError,
   checkActionVersion,
@@ -90,10 +90,9 @@ async function run() {
     const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
     const category = actionsUtil.getOptionalInput("category");

-    const uploadResults = await postProcessAndUploadSarif(
+    const uploadResults = await uploadSarif(
       logger,
       features,
-      "always",
       checkoutPath,
       sarifPath,
       category,
@@ -9,7 +9,7 @@ import { getRunnerLogger } from "./logging";
 import { createFeatures, setupTests } from "./testing-utils";
 import { UploadResult } from "./upload-lib";
 import * as uploadLib from "./upload-lib";
-import { postProcessAndUploadSarif } from "./upload-sarif";
+import { uploadSarif } from "./upload-sarif";
 import * as util from "./util";

 setupTests(test);
@@ -19,27 +19,7 @@ interface UploadSarifExpectedResult {
   expectedFiles?: string[];
 }

-function mockPostProcessSarifFiles() {
-  const postProcessSarifFiles = sinon.stub(uploadLib, "postProcessSarifFiles");
-
-  for (const analysisKind of Object.values(AnalysisKind)) {
-    const analysisConfig = getAnalysisConfig(analysisKind);
-    postProcessSarifFiles
-      .withArgs(
-        sinon.match.any,
-        sinon.match.any,
-        sinon.match.any,
-        sinon.match.any,
-        sinon.match.any,
-        analysisConfig,
-      )
-      .resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" });
-  }
-
-  return postProcessSarifFiles;
-}
-
-const postProcessAndUploadSarifMacro = test.macro({
+const uploadSarifMacro = test.macro({
   exec: async (
     t: ExecutionContext<unknown>,
     sarifFiles: string[],
@@ -53,16 +33,21 @@ const postProcessAndUploadSarifMacro = test.macro({

     const toFullPath = (filename: string) => path.join(tempDir, filename);

-    const postProcessSarifFiles = mockPostProcessSarifFiles();
-    const uploadPostProcessedFiles = sinon.stub(
+    const uploadSpecifiedFiles = sinon.stub(
       uploadLib,
-      "uploadPostProcessedFiles",
+      "uploadSpecifiedFiles",
     );

     for (const analysisKind of Object.values(AnalysisKind)) {
-      const analysisConfig = getAnalysisConfig(analysisKind);
-      uploadPostProcessedFiles
-        .withArgs(logger, sinon.match.any, analysisConfig, sinon.match.any)
+      uploadSpecifiedFiles
+        .withArgs(
+          sinon.match.any,
+          sinon.match.any,
+          sinon.match.any,
+          features,
+          logger,
+          getAnalysisConfig(analysisKind),
+        )
         .resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
     }

@@ -71,57 +56,53 @@ const postProcessAndUploadSarifMacro = test.macro({
       fs.writeFileSync(sarifFile, "");
     }

-    const actual = await postProcessAndUploadSarif(
-      logger,
-      features,
-      "always",
-      "",
-      testPath,
-    );
+    const actual = await uploadSarif(logger, features, "", testPath);

     for (const analysisKind of Object.values(AnalysisKind)) {
       const analysisKindResult = expectedResult[analysisKind];
       if (analysisKindResult) {
         // We are expecting a result for this analysis kind, check that we have it.
         t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult);
-        // Additionally, check that the mocked `postProcessSarifFiles` was called with only the file paths
+        // Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths
         // that we expected it to be called with.
         t.assert(
-          postProcessSarifFiles.calledWith(
-            logger,
-            features,
-            sinon.match.any,
+          uploadSpecifiedFiles.calledWith(
             analysisKindResult.expectedFiles?.map(toFullPath) ??
               fullSarifPaths,
             sinon.match.any,
+            sinon.match.any,
+            features,
+            logger,
             getAnalysisConfig(analysisKind),
           ),
         );
       } else {
         // Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined`
-        // is also returned by our mocked `uploadProcessedFiles` when there is no expected result for this
+        // is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this
         // analysis kind.
         t.is(actual[analysisKind], undefined);
-        // Therefore, we also check that the mocked `uploadProcessedFiles` was not called for this analysis kind.
+        // Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind.
         t.assert(
-          !uploadPostProcessedFiles.calledWith(
+          !uploadSpecifiedFiles.calledWith(
+            sinon.match.any,
+            sinon.match.any,
+            sinon.match.any,
+            features,
             logger,
-            sinon.match.any,
             getAnalysisConfig(analysisKind),
-            sinon.match.any,
           ),
-          `uploadProcessedFiles was called for ${analysisKind}, but should not have been.`,
+          `uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
         );
       }
     }
   });
  },
-  title: (providedTitle = "") => `processAndUploadSarif - ${providedTitle}`,
+  title: (providedTitle = "") => `uploadSarif - ${providedTitle}`,
 });

 test(
   "SARIF file",
-  postProcessAndUploadSarifMacro,
+  uploadSarifMacro,
   ["test.sarif"],
   (tempDir) => path.join(tempDir, "test.sarif"),
   {
@@ -136,7 +117,7 @@ test(

 test(
   "JSON file",
-  postProcessAndUploadSarifMacro,
+  uploadSarifMacro,
   ["test.json"],
   (tempDir) => path.join(tempDir, "test.json"),
   {
@@ -151,7 +132,7 @@ test(

 test(
   "Code Scanning files",
-  postProcessAndUploadSarifMacro,
+  uploadSarifMacro,
   ["test.json", "test.sarif"],
   undefined,
   {
@@ -167,7 +148,7 @@ test(

 test(
   "Code Quality file",
-  postProcessAndUploadSarifMacro,
+  uploadSarifMacro,
   ["test.quality.sarif"],
   (tempDir) => path.join(tempDir, "test.quality.sarif"),
   {
@@ -182,7 +163,7 @@ test(

 test(
   "Mixed files",
-  postProcessAndUploadSarifMacro,
+  uploadSarifMacro,
   ["test.sarif", "test.quality.sarif"],
   undefined,
   {
@@ -202,65 +183,3 @@ test(
     },
   },
 );
-
-test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => {
-  await util.withTmpDir(async (tempDir) => {
-    const logger = getRunnerLogger(true);
-    const features = createFeatures([]);
-
-    const toFullPath = (filename: string) => path.join(tempDir, filename);
-
-    const postProcessSarifFiles = mockPostProcessSarifFiles();
-    const uploadPostProcessedFiles = sinon.stub(
-      uploadLib,
-      "uploadPostProcessedFiles",
-    );
-
-    fs.writeFileSync(toFullPath("test.sarif"), "");
-    fs.writeFileSync(toFullPath("test.quality.sarif"), "");
-
-    const actual = await postProcessAndUploadSarif(
-      logger,
-      features,
-      "never",
-      "",
-      tempDir,
-    );
-
-    t.truthy(actual);
-    t.assert(postProcessSarifFiles.calledTwice);
-    t.assert(uploadPostProcessedFiles.notCalled);
-  });
-});
-
-test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => {
-  await util.withTmpDir(async (tempDir) => {
-    const logger = getRunnerLogger(true);
-    const features = createFeatures([]);
-
-    const toFullPath = (filename: string) => path.join(tempDir, filename);
-
-    const postProcessSarifFiles = mockPostProcessSarifFiles();
-
-    fs.writeFileSync(toFullPath("test.sarif"), "");
-    fs.writeFileSync(toFullPath("test.quality.sarif"), "");
-
-    const postProcessedOutPath = path.join(tempDir, "post-processed");
-    const actual = await postProcessAndUploadSarif(
-      logger,
-      features,
-      "never",
-      "",
-      tempDir,
-      "",
-      postProcessedOutPath,
-    );
-
-    t.truthy(actual);
-    t.assert(postProcessSarifFiles.calledTwice);
-    t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif")));
-    t.assert(
-      fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")),
-    );
-  });
-});
@@ -1,4 +1,3 @@
-import { UploadKind } from "./actions-util";
 import * as analyses from "./analyses";
 import { FeatureEnablement } from "./feature-flags";
 import { Logger } from "./logging";
@@ -11,26 +10,22 @@ export type UploadSarifResults = Partial<
 >;

 /**
- * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
+ * Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
  *
  * @param logger The logger to use.
  * @param features Information about enabled features.
- * @param uploadKind The kind of upload that is requested.
  * @param checkoutPath The path where the repository was checked out at.
  * @param sarifPath The path to the file or directory to upload.
  * @param category The analysis category.
- * @param postProcessedOutputPath The path to a directory to which the post-processed SARIF files should be written to.
  *
  * @returns A partial mapping from analysis kinds to the upload results.
  */
-export async function postProcessAndUploadSarif(
+export async function uploadSarif(
   logger: Logger,
   features: FeatureEnablement,
-  uploadKind: UploadKind,
   checkoutPath: string,
   sarifPath: string,
   category?: string,
-  postProcessedOutputPath?: string,
 ): Promise<UploadSarifResults> {
   const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
     logger,
@@ -42,34 +37,15 @@ export async function postProcessAndUploadSarif(
     sarifGroups,
   )) {
     const analysisConfig = analyses.getAnalysisConfig(analysisKind);
-    const postProcessingResults = await upload_lib.postProcessSarifFiles(
-      logger,
-      features,
-      checkoutPath,
+    uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
       sarifFiles,
-      category,
-      analysisConfig,
-    );
-
-    // Write the post-processed SARIF files to disk. This will only write them if needed based on user inputs
-    // or environment variables.
-    await upload_lib.writePostProcessedFiles(
-      logger,
-      postProcessedOutputPath,
-      analysisConfig,
-      postProcessingResults,
-    );
-
-    // Only perform the actual upload of the post-processed files if `uploadKind` is `always`.
-    if (uploadKind === "always") {
-      uploadResults[analysisKind] = await upload_lib.uploadPostProcessedFiles(
-        logger,
       checkoutPath,
+      category,
+      features,
+      logger,
       analysisConfig,
-        postProcessingResults,
     );
   }
-  }

   return uploadResults;
 }
@@ -252,35 +252,6 @@ test("allowed API versions", async (t) => {
   );
 });

-test("getRequiredEnvParam - gets environment variables", (t) => {
-  process.env.SOME_UNIT_TEST_VAR = "foo";
-  const result = util.getRequiredEnvParam("SOME_UNIT_TEST_VAR");
-  t.is(result, "foo");
-});
-
-test("getRequiredEnvParam - throws if an environment variable isn't set", (t) => {
-  t.throws(() => util.getRequiredEnvParam("SOME_UNIT_TEST_VAR"));
-});
-
-test("getOptionalEnvVar - gets environment variables", (t) => {
-  process.env.SOME_UNIT_TEST_VAR = "foo";
-  const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
-  t.is(result, "foo");
-});
-
-test("getOptionalEnvVar - gets undefined for empty environment variables", (t) => {
-  process.env.SOME_UNIT_TEST_VAR = "";
-  const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
-  t.is(result, undefined);
-});
-
-test("getOptionalEnvVar - doesn't throw for undefined environment variables", (t) => {
-  t.notThrows(() => {
-    const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
-    t.is(result, undefined);
-  });
-});
-
 test("doesDirectoryExist", async (t) => {
   // Returns false if no file/dir of this name exists
   t.false(util.doesDirectoryExist("non-existent-file.txt"));
Some files were not shown because too many files have changed in this diff.