Mirror of https://github.com/github/codeql-action.git (synced 2025-12-08 00:38:30 +08:00)

Compare commits: codeql-bun ... codeql-bun

90 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 8aff97f12c | |
| | 31a2afec21 | |
| | 6a5e69e74f | |
| | c3b6fce4ee | |
| | 8aa42f1f11 | |
| | 29a5553722 | |
| | e260194d76 | |
| | a8cabafa56 | |
| | 862a512899 | |
| | 71510779c2 | |
| | 81a1ec0fb3 | |
| | 60c8cda203 | |
| | 18fe527fa8 | |
| | f04ca7c11c | |
| | cd983e71c6 | |
| | 2ec046b5ac | |
| | 72bd9cbe62 | |
| | ac0112f7f1 | |
| | 77b1f7e44c | |
| | aa07b3894b | |
| | c44e6c6096 | |
| | ae0a2603c1 | |
| | bfcbb093ac | |
| | 4b73c4f99e | |
| | 6c6b550a41 | |
| | a3141c7a07 | |
| | 13cb2ca824 | |
| | 7e2585030f | |
| | 4b37e17ec1 | |
| | a12a861b82 | |
| | 993ca05cd7 | |
| | a31200481f | |
| | 4ed5abeff3 | |
| | 734292689d | |
| | 5767f918ef | |
| | f248a57d3b | |
| | 40542d38bc | |
| | 55ffe2dcbb | |
| | 3c7f7914e6 | |
| | e76b89fe31 | |
| | 98f6408f34 | |
| | ec3cf9c605 | |
| | f246f20ec4 | |
| | 4b53723d6b | |
| | de9f112cd1 | |
| | f1a4ff53b4 | |
| | 624418cb40 | |
| | f0a1281661 | |
| | f0b3ef9e9c | |
| | 3920e2d8ae | |
| | be55631a21 | |
| | cc7986c02b | |
| | f3a27d6945 | |
| | aecd03235b | |
| | 96c8872f06 | |
| | b709139433 | |
| | 5dd73678a4 | |
| | 4c1ccc4a5e | |
| | c2f5185572 | |
| | 297ec80a46 | |
| | b0f8861cea | |
| | 2ee8edc7f0 | |
| | eba983fb9b | |
| | e2481f79eb | |
| | 17ae47e4b4 | |
| | 2c4b2a1b38 | |
| | 0c7f674831 | |
| | 2fafb297de | |
| | 160e3fe79a | |
| | 34d48f825c | |
| | e862e8fc76 | |
| | 099de40ecf | |
| | cc00a9d478 | |
| | 082bdf06b5 | |
| | 44edb7c4b5 | |
| | 43c3ed9c28 | |
| | 701cea34ba | |
| | c27c6c7642 | |
| | adb28963c0 | |
| | 362f9a2522 | |
| | 6e1dab28b6 | |
| | 6c869f8b03 | |
| | 919e4caca1 | |
| | 1a17c59fb0 | |
| | b27aed78f5 | |
| | 807578363a | |
| | 5915e70486 | |
| | 6de05e4b24 | |
| | b16314e16c | |
| | e5c3375225 | |

.github/prepare-test/action.yml (vendored): 10 changed lines

@@ -22,17 +22,17 @@ runs:
  run: |
  if [[ ${{ inputs.version }} == "nightly-latest" ]]; then
  export LATEST=`gh release list --repo dsp-testing/codeql-cli-nightlies -L 1 | cut -f 3`
- echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz"
+ echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/$LATEST/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
  elif [[ ${{ inputs.version }} == *"nightly"* ]]; then
  export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
- echo "::set-output name=tools-url::https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz"
+ echo "tools-url=https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-$VERSION-manual/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
  elif [[ ${{ inputs.version }} == *"stable"* ]]; then
  export VERSION=`echo ${{ inputs.version }} | sed -e 's/^.*\-//'`
- echo "::set-output name=tools-url::https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz"
+ echo "tools-url=https://github.com/github/codeql-action/releases/download/codeql-bundle-$VERSION/codeql-bundle.tar.gz" >> $GITHUB_OUTPUT
  elif [[ ${{ inputs.version }} == "latest" ]]; then
- echo "::set-output name=tools-url::latest"
+ echo "tools-url=latest" >> $GITHUB_OUTPUT
  elif [[ ${{ inputs.version }} == "cached" ]]; then
- echo "::set-output name=tools-url::"
+ echo "tools-url=" >> $GITHUB_OUTPUT
  else
  echo "::error Unrecognized version specified!"
  fi

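The change above is one instance of a migration that repeats throughout this comparison: the deprecated `::set-output` workflow command is replaced by appending `name=value` lines to the file referenced by `$GITHUB_OUTPUT` (see the 2.1.28 CHANGELOG entry further down). A minimal sketch of the pattern, with a placeholder URL and step id that are not taken from this repository:

```bash
#!/usr/bin/env bash
# Old, deprecated form: a workflow command printed to stdout and parsed by the runner.
echo "::set-output name=tools-url::https://example.com/codeql-bundle.tar.gz"

# New form: append "name=value" to the environment file the runner provides.
echo "tools-url=https://example.com/codeql-bundle.tar.gz" >> "$GITHUB_OUTPUT"

# Multi-line values use a delimiter block written to the same file.
{
  echo "notes<<NOTES_EOF"
  echo "first line"
  echo "second line"
  echo "NOTES_EOF"
} >> "$GITHUB_OUTPUT"
```

Either way, later steps read the value identically, for example `${{ steps.prepare.outputs.tools-url }}` for a step with `id: prepare` (the id here is hypothetical).
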
.github/workflows/__analyze-ref-input.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: "Analyze: 'ref' and 'sha' from inputs"
  timeout-minutes: 45

.github/workflows/__autobuild-action.yml (generated, vendored): 4 changed lines

@@ -29,9 +29,7 @@ jobs:
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  name: autobuild-action
  timeout-minutes: 45

.github/workflows/__go-custom-queries.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Go: Custom queries'
  timeout-minutes: 45

.github/workflows/__go-custom-tracing-autobuild.yml (generated, vendored): 6 changed lines

@@ -25,15 +25,15 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

.github/workflows/__go-custom-tracing.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Go: Custom tracing'
  timeout-minutes: 45

.github/workflows/__go-reconciled-tracing-autobuilder.yml (generated, vendored): 6 changed lines

@@ -25,15 +25,15 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

.github/workflows/__go-reconciled-tracing-custom-build-steps.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Go: Reconciled tracing with custom build steps'
  timeout-minutes: 45

.github/workflows/__go-reconciled-tracing-legacy-workflow.yml (generated, vendored): 6 changed lines

@@ -25,15 +25,15 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

.github/workflows/__init-with-registries.yml (generated, vendored): 4 changed lines

@@ -29,9 +29,7 @@ jobs:
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Packaging: Download using registries'
  timeout-minutes: 45

.github/workflows/__multi-language-autodetect.yml (generated, vendored): 6 changed lines

@@ -25,15 +25,15 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

.github/workflows/__packaging-codescanning-config-inputs-js.yml (generated, vendored): 10 changed lines

@@ -29,23 +29,19 @@ jobs:
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Packaging: Config and input passed to the CLI'
  timeout-minutes: 45

.github/workflows/__packaging-config-inputs-js.yml (generated, vendored): 10 changed lines

@@ -29,23 +29,19 @@ jobs:
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Packaging: Config and input'
  timeout-minutes: 45

.github/workflows/__packaging-config-js.yml (generated, vendored): 10 changed lines

@@ -29,23 +29,19 @@ jobs:
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Packaging: Config file'
  timeout-minutes: 45

.github/workflows/__packaging-inputs-js.yml (generated, vendored): 10 changed lines

@@ -29,23 +29,19 @@ jobs:
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: 'Packaging: Action input'
  timeout-minutes: 45

.github/workflows/__remote-config.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: Remote config file
  timeout-minutes: 45

.github/workflows/__rubocop-multi-language.yml (generated, vendored): 10 changed lines

@@ -25,18 +25,8 @@ jobs:
  strategy:
  matrix:
  include:
  - os: ubuntu-latest
  version: stable-20210308
  - os: ubuntu-latest
  version: stable-20210319
  - os: ubuntu-latest
  version: stable-20210809
  - os: ubuntu-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  name: RuboCop multi-language
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}

.github/workflows/__test-proxy.yml (generated, vendored): 4 changed lines

@@ -51,10 +51,10 @@ jobs:
  https_proxy: http://squid-proxy:3128
  INTERNAL_CODEQL_ACTION_DEBUG_LOC: true
  container:
- image: ubuntu:18.04
+ image: ubuntu:22.04
  options: --dns 127.0.0.1
  services:
  squid-proxy:
- image: datadog/squid:latest
+ image: ubuntu/squid:latest
  ports:
  - 3128:3128

.github/workflows/__upload-ref-sha-input.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: "Upload-sarif: 'ref' and 'sha' from inputs"
  timeout-minutes: 45

.github/workflows/__with-checkout-path.yml (generated, vendored): 16 changed lines

@@ -25,19 +25,19 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
  - os: macos-latest
  version: stable-20210308
  - os: windows-2019
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  - os: macos-latest
  version: stable-20210319
  - os: windows-2019
  version: stable-20210319
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: macos-latest
  version: stable-20210809

@@ -47,23 +47,19 @@ jobs:
  version: cached
  - os: macos-latest
  version: cached
- - os: windows-2019
+ - os: windows-latest
  version: cached
  - os: ubuntu-latest
  version: latest
  - os: macos-latest
  version: latest
- - os: windows-2019
- version: latest
- - os: windows-2022
+ - os: windows-latest
  version: latest
  - os: ubuntu-latest
  version: nightly-latest
  - os: macos-latest
  version: nightly-latest
- - os: windows-2019
- version: nightly-latest
- - os: windows-2022
+ - os: windows-latest
  version: nightly-latest
  name: Use a custom `checkout_path`
  timeout-minutes: 45

.github/workflows/codeql.yml (vendored): 2 changed lines

@@ -61,7 +61,7 @@ jobs:
  # Output a JSON-encoded list with the distinct versions to test against.
  echo "Suggested matrix config for analysis job: $VERSIONS_JSON"
- echo "::set-output name=versions::${VERSIONS_JSON}"
+ echo "versions=${VERSIONS_JSON}" >> $GITHUB_OUTPUT

  build:
  needs: [check-codeql-versions]

@@ -24,7 +24,6 @@ jobs:
  continue-on-error: true

  strategy:
- fail-fast: true
  matrix:
  include:
  - os: ubuntu-latest

.github/workflows/debug-artifacts.yml (vendored): 39 changed lines

@@ -19,8 +19,31 @@ jobs:
  upload-artifacts:
  strategy:
  matrix:
- os: [ubuntu-latest, macos-latest]
- version: [stable-20210308, stable-20210319, stable-20210809, cached, latest, nightly-latest]
+ include:
+ - os: ubuntu-20.04
+ version: stable-20210308
+ - os: macos-latest
+ version: stable-20210308
+ - os: ubuntu-20.04
+ version: stable-20210319
+ - os: macos-latest
+ version: stable-20210319
+ - os: ubuntu-20.04
+ version: stable-20210809
+ - os: macos-latest
+ version: stable-20210809
+ - os: ubuntu-latest
+ version: cached
+ - os: macos-latest
+ version: cached
+ - os: ubuntu-latest
+ version: latest
+ - os: macos-latest
+ version: latest
+ - os: ubuntu-latest
+ version: nightly-latest
+ - os: macos-latest
+ version: nightly-latest
  name: Upload debug artifacts
  timeout-minutes: 45
  runs-on: ${{ matrix.os }}

@@ -58,11 +81,17 @@ jobs:
  - name: Check expected artifacts exist
  shell: bash
  run: |
- OPERATING_SYSTEMS="ubuntu-latest macos-latest"
  VERSIONS="stable-20210308 stable-20210319 stable-20210809 cached latest nightly-latest"
  LANGUAGES="cpp csharp go java javascript python"
- for os in $OPERATING_SYSTEMS; do
- for version in $VERSIONS; do
+ for version in $VERSIONS; do
+ if [[ "$version" =~ stable-(20210308|20210319|20210809) ]]; then
+ # Note the absence of the period in "ubuntu-2004": this is present in the image name
+ # but not the artifact name
+ OPERATING_SYSTEMS="ubuntu-2004 macos-latest"
+ else
+ OPERATING_SYSTEMS="ubuntu-latest macos-latest"
+ fi
+ for os in $OPERATING_SYSTEMS; do
  pushd "./my-debug-artifacts-$os-$version"
  echo "Artifacts from version $version on $os:"
  for language in $LANGUAGES; do

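The second hunk above inverts the loop order so the operating-system list is chosen per CodeQL version: the pinned `stable-*` bundles now run on `ubuntu-20.04`, whose artifact name is written `ubuntu-2004`, while the other versions stay on `ubuntu-latest`. Below is a sketch of that loop shape in isolation; the directory-existence check and listing stand in for the per-language verification the real workflow performs:

```bash
#!/usr/bin/env bash
# Choose the OS list per version, then visit each downloaded artifact directory.
VERSIONS="stable-20210308 stable-20210319 stable-20210809 cached latest nightly-latest"

for version in $VERSIONS; do
  # Pinned stable bundles are tested on ubuntu-20.04; the artifact name drops the period.
  if [[ "$version" =~ stable-(20210308|20210319|20210809) ]]; then
    OPERATING_SYSTEMS="ubuntu-2004 macos-latest"
  else
    OPERATING_SYSTEMS="ubuntu-latest macos-latest"
  fi

  for os in $OPERATING_SYSTEMS; do
    dir="./my-debug-artifacts-$os-$version"
    if [ -d "$dir" ]; then
      echo "Artifacts from version $version on $os:"
      ls "$dir"
    else
      echo "::error::Expected artifact directory $dir is missing"
      exit 1
    fi
  done
done
```
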
.github/workflows/post-release-mergeback.yml (vendored): 10 changed lines

@@ -47,11 +47,10 @@ jobs:
  id: getVersion
  run: |
  VERSION="v$(jq '.version' -r 'package.json')"
- echo "::set-output name=version::${VERSION}"
+ echo "version=${VERSION}" >> $GITHUB_OUTPUT
  short_sha="${GITHUB_SHA:0:8}"
  NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
- echo "::set-output name=newBranch::${NEW_BRANCH}"
-
+ echo "newBranch=${NEW_BRANCH}" >> $GITHUB_OUTPUT

  - name: Dump branches
  env:

@@ -77,7 +76,7 @@ jobs:
  exists="$?"
  if [ "${exists}" -eq 0 ]; then
  echo "Tag ${VERSION} exists. Not going to re-release."
- echo "::set-output name=exists::true"
+ echo "exists=true" >> $GITHUB_OUTPUT
  else
  echo "Tag ${VERSION} does not exist yet."
  fi

@@ -122,7 +121,8 @@ jobs:
  - [ ] Remove and re-add the "Update dependencies" label to the PR to trigger just this workflow.
  - [ ] Wait for the "Update dependencies" workflow to push a commit updating the dependencies.
  - [ ] Mark the PR as ready for review to trigger the full set of PR checks.
- - [ ] Approve and merge the PR. Make sure `Create a merge commit` is selected rather than `Squash and merge` or `Rebase and merge`.
+ - [ ] Approve and merge the PR. When merging the PR, make sure "Create a merge commit" is
+ selected rather than "Squash and merge" or "Rebase and merge".
  EOF
  )

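The `getVersion` step above applies the same output migration to a step that publishes two values, `version` and `newBranch`. A minimal sketch of the step body, assuming `BASE_BRANCH` is supplied through the step's `env:` block; the conventional `jq -r '.version' package.json` argument order is used here, whereas the workflow itself passes the flags in a different order:

```bash
#!/usr/bin/env bash
set -euo pipefail

# Read the release version from package.json.
VERSION="v$(jq -r '.version' package.json)"
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"

# Build a mergeback branch name from the version, the target branch, and a short commit SHA.
short_sha="${GITHUB_SHA:0:8}"
NEW_BRANCH="mergeback/${VERSION}-to-${BASE_BRANCH}-${short_sha}"
echo "newBranch=${NEW_BRANCH}" >> "$GITHUB_OUTPUT"
```

Later steps can then reference `${{ steps.getVersion.outputs.version }}` and `${{ steps.getVersion.outputs.newBranch }}`.
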
.github/workflows/pr-checks.yml (vendored): 1 changed line

@@ -16,7 +16,6 @@ jobs:
  timeout-minutes: 45

  strategy:
- fail-fast: true
  matrix:
  node-types-version: [12.12, current]

.github/workflows/python-deps.yml (vendored): 8 changed lines

@@ -26,7 +26,7 @@ jobs:
  strategy:
  fail-fast: false
  matrix:
- os: [ubuntu-latest, ubuntu-22.04, macos-latest]
+ os: [ubuntu-20.04, ubuntu-22.04, macos-latest]
  python_deps_type: [pipenv, poetry, requirements, setup_py]
  python_version: [2, 3]
  exclude:

@@ -65,7 +65,7 @@ jobs:
  cd $GITHUB_WORKSPACE/python-setup/tests/${PYTHON_DEPS_TYPE}/requests-${PYTHON_VERSION}

  case ${{ matrix.os }} in
- ubuntu-latest*) basePath="/opt";;
+ ubuntu-20.04*) basePath="/opt";;
  ubuntu-22.04*) basePath="/opt";;
  macos-latest*) basePath="/Users/runner";;
  esac

@@ -90,7 +90,7 @@ jobs:
  strategy:
  fail-fast: false
  matrix:
- os: [ubuntu-latest, ubuntu-22.04, macos-latest]
+ os: [ubuntu-20.04, ubuntu-22.04, macos-latest]

  steps:
  # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it

@@ -112,7 +112,7 @@ jobs:
  cd $GITHUB_WORKSPACE/python-setup/tests/requirements/non-standard-location

  case ${{ matrix.os }} in
- ubuntu-latest*) basePath="/opt";;
+ ubuntu-20.04*) basePath="/opt";;
  ubuntu-22.04*) basePath="/opt";;
  macos-latest*) basePath="/Users/runner";;
  esac

@@ -21,7 +21,7 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210809
  - os: ubuntu-latest
  version: cached

@@ -24,9 +24,9 @@ jobs:
  strategy:
  matrix:
  include:
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210308
- - os: ubuntu-latest
+ - os: ubuntu-20.04
  version: stable-20210319
  name: Test unsetting environment variables
  timeout-minutes: 45

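The `case` hunks in python-deps.yml only swap the `ubuntu-latest*` pattern for `ubuntu-20.04*`; the shape of the statement is a lookup from the runner image to the base path under which the Python setup tests expect their environments. A standalone sketch with the paths taken from the diff and the argument handling added for illustration:

```bash
#!/usr/bin/env bash
# Map a runner image name (the value of matrix.os) to the expected base path.
os_name="${1:-ubuntu-22.04}"   # hypothetical positional argument standing in for ${{ matrix.os }}

case "$os_name" in
  ubuntu-20.04*) basePath="/opt";;
  ubuntu-22.04*) basePath="/opt";;
  macos-latest*) basePath="/Users/runner";;
  *) echo "Unexpected runner image: $os_name" >&2; exit 1;;
esac

echo "Python test environments are expected under: $basePath"
```
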
CHANGELOG.md: 17 changed lines

@@ -4,6 +4,23 @@

  No user facing changes.

+ ## 2.1.31 - 04 Nov 2022
+
+ - The `rb/weak-cryptographic-algorithm` Ruby query has been updated to no longer report uses of hash functions such as `MD5` and `SHA1` even if they are known to be weak. These hash algorithms are used very often in non-sensitive contexts, making the query too imprecise in practice. For more information, see the corresponding change in the [github/codeql repository](https://github.com/github/codeql/pull/11129). [#1344](https://github.com/github/codeql-action/pull/1344)
+
+ ## 2.1.30 - 02 Nov 2022
+
+ - Improve the error message when using CodeQL bundle version 2.7.2 and earlier in a workflow that runs on a runner image such as `ubuntu-22.04` that uses glibc version 2.34 and later. [#1334](https://github.com/github/codeql-action/pull/1334)
+
+ ## 2.1.29 - 26 Oct 2022
+
+ - Update default CodeQL bundle version to 2.11.2. [#1320](https://github.com/github/codeql-action/pull/1320)
+
+ ## 2.1.28 - 18 Oct 2022
+
+ - Update default CodeQL bundle version to 2.11.1. [#1294](https://github.com/github/codeql-action/pull/1294)
+ - Replace uses of GitHub Actions command `set-output` because it is now deprecated. See more information in the [GitHub Changelog](https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/). [#1301](https://github.com/github/codeql-action/pull/1301)
+
  ## 2.1.27 - 06 Oct 2022

  - We are rolling out a feature of the CodeQL Action in October 2022 that changes the way that Go code is analyzed to be more consistent with other compiled languages like C/C++, C#, and Java. You do not need to alter your code scanning workflows. If you encounter any problems, please [file an issue](https://github.com/github/codeql-action/issues) or open a private ticket with GitHub Support and request an escalation to engineering.

@@ -61,7 +61,7 @@ jobs:
  # with:
  # languages: go, javascript, csharp, python, cpp, java

- # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
  # If this step fails, then you should remove it and run the build manually (see below).
  - name: Autobuild
  uses: github/codeql-action/autobuild@v2

lib/actions-util.js (generated): 14 changed lines

@@ -502,6 +502,12 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
  const runnerOs = (0, util_1.getRequiredEnvParam)("RUNNER_OS");
  const codeQlCliVersion = (0, util_1.getCachedCodeQlVersion)();
  const actionRef = process.env["GITHUB_ACTION_REF"];
+ const testingEnvironment = process.env[sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT] || "";
+ // re-export the testing environment variable so that it is available to subsequent steps,
+ // even if it was only set for this step
+ if (testingEnvironment !== "") {
+ core.exportVariable(sharedEnv.CODEQL_ACTION_TESTING_ENVIRONMENT, testingEnvironment);
+ }
  const statusReport = {
  workflow_run_id: workflowRunID,
  workflow_name: workflowName,

@@ -515,6 +521,7 @@ async function createStatusReportBase(actionName, status, actionStartedAt, cause
  started_at: workflowStartedAt,
  action_started_at: actionStartedAt.toISOString(),
  status,
+ testing_environment: testingEnvironment,
  runner_os: runnerOs,
  action_version: pkg.version,
  };

@@ -563,13 +570,6 @@ const INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code sc
  * Returns whether sending the status report was successful of not.
  */
  async function sendStatusReport(statusReport) {
- const gitHubVersion = await api.getGitHubVersionActionsOnly();
- if ((0, util_1.isGitHubGhesVersionBelow)(gitHubVersion, "3.2.0")) {
- // GHES 3.1 and earlier versions reject unexpected properties, which means
- // that they will reject status reports with newly added properties.
- // Inhibiting status reporting for GHES < 3.2 avoids such failures.
- return true;
- }
  const statusReportJSON = JSON.stringify(statusReport);
  core.debug(`Sending status report: ${statusReportJSON}`);
  // If in test mode we don't want to upload the results

File diff suppressed because one or more lines are too long
lib/analyze-action.js (generated): 14 changed lines

@@ -95,7 +95,7 @@ function doesGoExtractionOutputExist(config) {
  ].some((ext) => fileName.endsWith(ext))));
  }
  /**
- * When Go extraction reconciliation is enabled, either via the feature flag
+ * When Go extraction reconciliation is enabled, either via the feature
  * or an environment variable, we will attempt to autobuild Go to preserve
  * compatibility for users who have set up Go using a legacy scanning style
  * CodeQL workflow, i.e. one without an autobuild step or manual build

@@ -107,11 +107,11 @@ function doesGoExtractionOutputExist(config) {
  * - We approximate whether manual build steps are present by looking at
  * whether any extraction output already exists for Go.
  */
- async function runAutobuildIfLegacyGoWorkflow(config, featureFlags, logger) {
+ async function runAutobuildIfLegacyGoWorkflow(config, featureEnablement, logger) {
  if (!config.languages.includes(languages_1.Language.go)) {
  return;
  }
- if (!(await util.isGoExtractionReconciliationEnabled(featureFlags))) {
+ if (!(await util.isGoExtractionReconciliationEnabled(featureEnablement))) {
  logger.debug("Won't run Go autobuild since Go extraction reconciliation is not enabled.");
  return;
  }

@@ -159,11 +159,11 @@ async function run() {
  const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
  const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
  const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
- const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
- await runAutobuildIfLegacyGoWorkflow(config, featureFlags, logger);
- dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, featureFlags);
+ const features = new feature_flags_1.Features(gitHubVersion, apiDetails, repositoryNwo, logger);
+ await runAutobuildIfLegacyGoWorkflow(config, features, logger);
+ dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, features);
  if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
- runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, featureFlags);
+ runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, features);
  }
  if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
  await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);

File diff suppressed because one or more lines are too long
lib/analyze.js (generated): 20 changed lines

@@ -70,12 +70,12 @@ async function setupPythonExtractor(logger) {
  logger.info(`Setting LGTM_PYTHON_SETUP_VERSION=${output}`);
  process.env["LGTM_PYTHON_SETUP_VERSION"] = output;
  }
- async function createdDBForScannedLanguages(codeql, config, logger, featureFlags) {
+ async function createdDBForScannedLanguages(codeql, config, logger, featureEnablement) {
  // Insert the LGTM_INDEX_X env vars at this point so they are set when
  // we extract any scanned languages.
  analysisPaths.includeAndExcludeAnalysisPaths(config);
  for (const language of config.languages) {
- if ((0, languages_1.isScannedLanguage)(language, await util.isGoExtractionReconciliationEnabled(featureFlags), logger) &&
+ if ((0, languages_1.isScannedLanguage)(language, await util.isGoExtractionReconciliationEnabled(featureEnablement), logger) &&
  !dbIsFinalized(config, language, logger)) {
  logger.startGroup(`Extracting ${language}`);
  if (language === languages_1.Language.python) {

@@ -99,10 +99,10 @@ function dbIsFinalized(config, language, logger) {
  }
  }
  exports.dbIsFinalized = dbIsFinalized;
- async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags) {
+ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureEnablement) {
  const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
  const extractionStart = perf_hooks_1.performance.now();
- await createdDBForScannedLanguages(codeql, config, logger, featureFlags);
+ await createdDBForScannedLanguages(codeql, config, logger, featureEnablement);
  const extractionTime = perf_hooks_1.performance.now() - extractionStart;
  const trapImportStart = perf_hooks_1.performance.now();
  for (const language of config.languages) {

@@ -122,7 +122,7 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger,
  };
  }
  // Runs queries and creates sarif files in the given folder
- async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureFlags) {
+ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
  const statusReport = {};
  let locPromise = Promise.resolve({});
  const cliCanCountBaseline = await cliCanCountLoC();

@@ -136,7 +136,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
  config.paths, config.pathsIgnore, config.languages, logger);
  }
  const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
- await util.logCodeScanningConfigInCli(codeql, featureFlags, logger);
+ await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
  for (const language of config.languages) {
  const queries = config.queries[language];
  const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);

@@ -148,7 +148,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
  throw new Error(`Unable to analyse ${language} as no queries were selected for this language`);
  }
  try {
- if (await util.useCodeScanningConfigInCli(codeql, featureFlags)) {
+ if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
  // If we are using the codescanning config in the CLI,
  // much of the work needed to generate the query suites
  // is done in the CLI. We just need to make a single

@@ -285,7 +285,7 @@ function createQuerySuiteContents(queries, queryFilters) {
  return yaml.dump(queries.map((q) => ({ query: q })).concat(queryFilters));
  }
  exports.createQuerySuiteContents = createQuerySuiteContents;
- async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, featureFlags) {
+ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, featureEnablement) {
  try {
  await (0, del_1.default)(outputDir, { force: true });
  }

@@ -295,7 +295,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, f
  }
  }
  await fs.promises.mkdir(outputDir, { recursive: true });
- const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureFlags);
+ const timings = await finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger, featureEnablement);
  const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
  // WARNING: This does not _really_ end tracing, as the tracer will restore its
  // critical environment variables and it'll still be active for all processes

@@ -304,7 +304,7 @@ async function runFinalize(outputDir, threadsFlag, memoryFlag, config, logger, f
  // step.
  if (await util.codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
  // Delete variables as specified by the end-tracing script
- await (0, tracer_config_1.endTracingForCluster)(config, await util.isGoExtractionReconciliationEnabled(featureFlags), logger);
+ await (0, tracer_config_1.endTracingForCluster)(config, await util.isGoExtractionReconciliationEnabled(featureEnablement), logger);
  }
  else {
  // Delete the tracer config env var to avoid tracing ourselves

File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated): 5 changed lines

@@ -30,7 +30,6 @@ const sinon = __importStar(require("sinon"));
  const analyze_1 = require("./analyze");
  const codeql_1 = require("./codeql");
  const count = __importStar(require("./count-loc"));
- const feature_flags_1 = require("./feature-flags");
  const languages_1 = require("./languages");
  const logging_1 = require("./logging");
  const testing_utils_1 = require("./testing-utils");

@@ -132,7 +131,7 @@ const util = __importStar(require("./util"));
  builtin: ["foo.ql"],
  custom: [],
  };
- const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([]));
+ const builtinStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
  const hasPacks = language in packs;
  const statusReportKeys = Object.keys(builtinStatusReport).sort();
  if (hasPacks) {

@@ -158,7 +157,7 @@ const util = __importStar(require("./util"));
  },
  ],
  };
- const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, feature_flags_1.createFeatureFlags)([]));
+ const customStatusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([]));
  t.deepEqual(Object.keys(customStatusReport).length, 2);
  t.true(`analyze_custom_queries_${language}_duration_ms` in customStatusReport);
  const expectedSearchPathsUsed = hasPacks

File diff suppressed because one or more lines are too long
@@ -1 +1 @@
- { "maximumVersion": "3.7", "minimumVersion": "3.2" }
+ { "maximumVersion": "3.7", "minimumVersion": "3.3" }

lib/autobuild-action.js (generated): 4 changed lines

@@ -54,12 +54,12 @@ async function run() {
  }
  const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
  (0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
- const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, (0, api_client_1.getApiDetails)(), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), logger);
+ const features = new feature_flags_1.Features(gitHubVersion, (0, api_client_1.getApiDetails)(), (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY")), logger);
  const config = await configUtils.getConfig((0, actions_util_1.getTemporaryDirectory)(), logger);
  if (config === undefined) {
  throw new Error("Config file could not be found at expected location. Has the 'init' action been called?");
  }
- languages = await (0, autobuild_1.determineAutobuildLanguages)(config, featureFlags, logger);
+ languages = await (0, autobuild_1.determineAutobuildLanguages)(config, features, logger);
  if (languages !== undefined) {
  const workingDirectory = (0, actions_util_1.getOptionalInput)("working-directory");
  if (workingDirectory) {

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAA0E;AAC1E,2CAAwE;AACxE,4DAA8C;AAC9C,mDAAqD;AACrD,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAClD,iCAOgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAC1D,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,EAAE,WAAI,CAAC,OAAO,CAAC,CAAC;QAE/D,MAAM,YAAY,GAAG,IAAI,kCAAkB,CACzC,aAAa,EACb,IAAA,0BAAa,GAAE,EACf,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,MAAM,CACP,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAOwB;AACxB,6CAA0E;AAC1E,2CAAwE;AACxE,4DAA8C;AAC9C,mDAA2C;AAC3C,2CAAuC;AACvC,uCAA6C;AAC7C,6CAAkD;AAClD,iCAOgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AASvC,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,WAAI,CAAC,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IAEjD,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EACd,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,wCAA2B,GAAE,CAAC;QAC1D,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,EAAE,WAAI,CAAC,OAAO,CAAC,CAAC;QAE/D,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,IAAA,0BAAa,GAAE,EACf,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,MAAM,CACP,CAAC;QAEF,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC;QACxE,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,aAAT,SAAS,cAAT,SAAS,GAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}

lib/autobuild.js (generated): 4 changed lines

@@ -23,8 +23,8 @@ exports.runAutobuild = exports.determineAutobuildLanguages = void 0;
  const codeql_1 = require("./codeql");
  const languages_1 = require("./languages");
  const util = __importStar(require("./util"));
- async function determineAutobuildLanguages(config, featureFlags, logger) {
- const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureFlags);
+ async function determineAutobuildLanguages(config, featureEnablement, logger) {
+ const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureEnablement);
  // Attempt to find a language to autobuild
  // We want pick the dominant language in the repo from the ones we're able to build
  // The languages are sorted in order specified by user or by lines of code if we got

@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,qCAAqC;AAGrC,2CAAyD;AAEzD,6CAA+B;AAExB,KAAK,UAAU,2BAA2B,CAC/C,MAA0B,EAC1B,YAA0B,EAC1B,MAAc;IAEd,MAAM,mCAAmC,GACvC,MAAM,IAAI,CAAC,mCAAmC,CAAC,YAAY,CAAC,CAAC;IAC/D,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,EAAE,mCAAmC,EAAE,MAAM,CAAC,CACjE,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;QAChD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;KAChD;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE;QACpE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;KAC7B;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,4BAA4B;YAC5B,0NAA0N,CAC7N,CAAC;KACH;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAzFD,kEAyFC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,qCAAqC;AAGrC,2CAAyD;AAEzD,6CAA+B;AAExB,KAAK,UAAU,2BAA2B,CAC/C,MAA0B,EAC1B,iBAAoC,EACpC,MAAc;IAEd,MAAM,mCAAmC,GACvC,MAAM,IAAI,CAAC,mCAAmC,CAAC,iBAAiB,CAAC,CAAC;IACpE,0CAA0C;IAC1C,mFAAmF;IACnF,oFAAoF;IACpF,4EAA4E;IAC5E,MAAM,kBAAkB,GAAG,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CACvD,IAAA,4BAAgB,EAAC,CAAC,EAAE,mCAAmC,EAAE,MAAM,CAAC,CACjE,CAAC;IAEF,IAAI,CAAC,kBAAkB,EAAE;QACvB,MAAM,CAAC,IAAI,CACT,iEAAiE,CAClE,CAAC;QACF,OAAO,SAAS,CAAC;KAClB;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACH,MAAM,2BAA2B,GAAG,kBAAkB,CAAC,MAAM,CAC3D,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,oBAAQ,CAAC,EAAE,CACzB,CAAC;IAEF,MAAM,SAAS,GAAe,EAAE,CAAC;IACjC,yEAAyE;IACzE,UAAU;IACV,IAAI,2BAA2B,CAAC,CAAC,CAAC,KAAK,SAAS,EAAE;QAChD,SAAS,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC,CAAC,CAAC;KAChD;IACD,uEAAuE;IACvE,wCAAwC;IACxC,IAAI,kBAAkB,CAAC,MAAM,KAAK,2BAA2B,CAAC,MAAM,EAAE;QACpE,SAAS,CAAC,IAAI,CAAC,oBAAQ,CAAC,EAAE,CAAC,CAAC;KAC7B;IAED,MAAM,CAAC,KAAK,CAAC,kBAAkB,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IAE3D,2EAA2E;IAC3E,4EAA4E;IAC5E,2CAA2C;IAC3C,uEAAuE;IACvE,2EAA2E;IAC3E,uEAAuE;IACvE,yCAAyC;IACzC,IAAI,2BAA2B,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1C,MAAM,CAAC,OAAO,CACZ,oCAAoC,SAAS,CAAC,IAAI,CAChD,OAAO,CACR,8BAA8B,2BAA2B;aACvD,KAAK,CAAC,CAAC,CAAC;aACR,IAAI,CACH,OAAO,CACR,kFAAkF;YACnF,4BAA4B;YAC5B,0NAA0N,CAC7N,CAAC;KACH;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAzFD,kEAyFC;AAEM,KAAK,UAAU,YAAY,CAChC,QAAkB,EAClB,MAA0B,EAC1B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;IACxE,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IACpC,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AATD,oCASC"}

lib/codeql.js (generated): 68 changed lines

@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_ML_POWERED_QUERIES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
+ exports.getExtraOptions = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.convertToSemVer = exports.getCodeQLURLVersion = exports.setupCodeQL = exports.getCodeQLActionRepository = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CODEQL_VERSION_CONFIG_FILES = exports.CODEQL_VERSION_COUNTS_LINES = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = exports.CommandInvocationError = void 0;
  const fs = __importStar(require("fs"));
  const path = __importStar(require("path"));
  const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));

@@ -43,10 +43,11 @@ const trap_caching_1 = require("./trap-caching");
  const util = __importStar(require("./util"));
  const util_1 = require("./util");
  class CommandInvocationError extends Error {
- constructor(cmd, args, exitCode, error) {
+ constructor(cmd, args, exitCode, error, output) {
  super(`Failure invoking ${cmd} with arguments ${args}.\n
  Exit code ${exitCode} and error was:\n
  ${error}`);
+ this.output = output;
  }
  }
  exports.CommandInvocationError = CommandInvocationError;

@@ -79,7 +80,6 @@ const CODEQL_VERSION_GROUP_RULES = "2.5.5";
  const CODEQL_VERSION_SARIF_GROUP = "2.5.3";
  exports.CODEQL_VERSION_COUNTS_LINES = "2.6.2";
  const CODEQL_VERSION_CUSTOM_QUERY_HELP = "2.7.1";
- exports.CODEQL_VERSION_ML_POWERED_QUERIES = "2.7.5";
  const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
  exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
  const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";

@@ -94,6 +94,11 @@ exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
  * versions above that.
  */
  exports.CODEQL_VERSION_NEW_TRACING = "2.7.0";
+ /**
+ * Versions 2.7.3+ of the CodeQL CLI support build tracing with glibc 2.34 on Linux. Versions before
+ * this cannot perform build tracing when running on the Actions `ubuntu-22.04` runner image.
+ */
+ exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";
  /**
  * Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
  * resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of

@@ -220,13 +225,13 @@ async function getCodeQLBundleDownloadURL(apiDetails, variant, logger) {
  * @param apiDetails
  * @param tempDir
  * @param variant
- * @param featureFlags
+ * @param features
  * @param logger
  * @param checkVersion Whether to check that CodeQL CLI meets the minimum
  * version requirement. Must be set to true outside tests.
  * @returns a { CodeQL, toolsVersion } object.
  */
- async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags, logger, checkVersion) {
+ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, features, logger, checkVersion) {
  try {
  const forceLatestReason =
  // We use the special value of 'latest' to prioritize the version in the

@@ -234,12 +239,12 @@ async function setupCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags
  codeqlURL === "latest"
  ? '"tools: latest" was requested'
  : // If the user hasn't requested a particular CodeQL version, then bypass
|
||||
// the toolcache when the appropriate feature flag is enabled. This
|
||||
// the toolcache when the appropriate feature is enabled. This
|
||||
// allows us to quickly rollback a broken bundle that has made its way
|
||||
// into the toolcache.
|
||||
codeqlURL === undefined &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.BypassToolcacheEnabled))
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature flag is enabled"
|
||||
(await features.getValue(feature_flags_1.Feature.BypassToolcacheEnabled))
|
||||
? "a specific version of CodeQL was not requested and the bypass toolcache feature is enabled"
|
||||
: undefined;
|
||||
const forceLatest = forceLatestReason !== undefined;
|
||||
if (forceLatest) {
|
||||
@@ -468,15 +473,32 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
// action/runner has been implemented in `codeql database trace-command`
|
||||
// _and_ is present in the latest supported CLI release.)
|
||||
const envFile = path.resolve(databasePath, "working", "env.tmp");
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
databasePath,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
process.execPath,
|
||||
tracerEnvJs,
|
||||
envFile,
|
||||
]);
|
||||
try {
|
||||
await runTool(cmd, [
|
||||
"database",
|
||||
"trace-command",
|
||||
databasePath,
|
||||
...getExtraOptionsFromEnv(["database", "trace-command"]),
|
||||
process.execPath,
|
||||
tracerEnvJs,
|
||||
envFile,
|
||||
]);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof CommandInvocationError &&
|
||||
e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
|
||||
process.platform === "linux" &&
|
||||
!(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_TRACING_GLIBC_2_34))) {
|
||||
throw new util.UserError("The CodeQL CLI is incompatible with the version of glibc on your system. " +
|
||||
`Please upgrade to CodeQL CLI version ${exports.CODEQL_VERSION_TRACING_GLIBC_2_34} or ` +
|
||||
"later. If you cannot upgrade to a newer version of the CodeQL CLI, you can " +
|
||||
`alternatively run your workflow on another runner image such as "ubuntu-20.04" ` +
|
||||
"that has glibc 2.33 or earlier installed.");
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
return JSON.parse(fs.readFileSync(envFile, "utf-8"));
|
||||
},
|
||||
async databaseInit(databasePath, language, sourceRoot) {
|
||||
@@ -489,9 +511,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
...getExtraOptionsFromEnv(["database", "init"]),
|
||||
]);
|
||||
},
|
||||
async databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags, logger) {
|
||||
async databaseInitCluster(config, sourceRoot, processName, processLevel, featureEnablement, logger) {
|
||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||
const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureFlags);
|
||||
const isGoExtractionReconciliationEnabled = await util.isGoExtractionReconciliationEnabled(featureEnablement);
|
||||
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l, isGoExtractionReconciliationEnabled, logger)).length > 0) {
|
||||
extraArgs.push("--begin-tracing");
|
||||
extraArgs.push(...(await (0, trap_caching_1.getTrapCachingExtractorConfigArgs)(config)));
|
||||
@@ -516,7 +538,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||
extraArgs.push("--no-internal-use-lua-tracing");
|
||||
}
|
||||
}
|
||||
const configLocation = await generateCodescanningConfig(codeql, config, featureFlags);
|
||||
const configLocation = await generateCodescanningConfig(codeql, config, featureEnablement);
|
||||
if (configLocation) {
|
||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
||||
}
|
||||
@@ -865,7 +887,7 @@ async function runTool(cmd, args = []) {
|
||||
ignoreReturnCode: true,
|
||||
}).exec();
|
||||
if (exitCode !== 0)
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error);
|
||||
throw new CommandInvocationError(cmd, args, exitCode, error, output);
|
||||
return output;
|
||||
}
|
||||
/**
|
||||
@@ -876,9 +898,9 @@ async function runTool(cmd, args = []) {
|
||||
* @param config The configuration to use.
|
||||
* @returns the path to the generated user configuration file.
|
||||
*/
|
||||
async function generateCodescanningConfig(codeql, config, featureFlags) {
|
||||
async function generateCodescanningConfig(codeql, config, featureEnablement) {
|
||||
var _a;
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureFlags))) {
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
|
||||
return;
|
||||
}
|
||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
File diff suppressed because one or more lines are too long
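The lib/codeql.js hunks above add an `output` field to `CommandInvocationError` and wrap the `database trace-command` invocation so that a glibc-related failure is turned into an actionable message. The TypeScript sketch below condenses that pattern; `runTool`, `versionAtLeast`, and `UserError` here are simplified stand-ins defined for the example, not the action's real helpers.

```typescript
// Minimal self-contained sketch of the glibc handling added above.
// runTool, versionAtLeast and UserError are simplified stand-ins for the
// action's internal helpers, not its real implementations.
import { execFile } from "child_process";
import { promisify } from "util";

const execFileP = promisify(execFile);

const CODEQL_VERSION_TRACING_GLIBC_2_34 = "2.7.3";

class UserError extends Error {}

class CommandInvocationError extends Error {
  constructor(
    cmd: string,
    args: string[],
    exitCode: number,
    error: string,
    public output: string // captured tool output, used below to classify the failure
  ) {
    super(
      `Failure invoking ${cmd} with arguments ${args}.\n` +
        `Exit code ${exitCode} and error was:\n${error}`
    );
  }
}

// Runs a command, capturing output; throws CommandInvocationError on non-zero exit.
async function runTool(cmd: string, args: string[]): Promise<string> {
  try {
    const { stdout } = await execFileP(cmd, args);
    return stdout;
  } catch (e: any) {
    const output = `${e.stdout ?? ""}${e.stderr ?? ""}`;
    throw new CommandInvocationError(cmd, args, e.code ?? 1, e.stderr ?? String(e), output);
  }
}

// Naive "is actual >= minimum" comparison; the action uses the semver package instead.
function versionAtLeast(actual: string, minimum: string): boolean {
  const a = actual.split(".").map(Number);
  const b = minimum.split(".").map(Number);
  for (let i = 0; i < Math.max(a.length, b.length); i++) {
    if ((a[i] ?? 0) !== (b[i] ?? 0)) return (a[i] ?? 0) > (b[i] ?? 0);
  }
  return true;
}

async function traceCommand(cmd: string, args: string[], cliVersion: string): Promise<string> {
  try {
    return await runTool(cmd, args);
  } catch (e) {
    if (
      e instanceof CommandInvocationError &&
      e.output.includes("undefined symbol: __libc_dlopen_mode, version GLIBC_PRIVATE") &&
      process.platform === "linux" &&
      !versionAtLeast(cliVersion, CODEQL_VERSION_TRACING_GLIBC_2_34)
    ) {
      // Pre-2.7.3 CLIs cannot trace builds against glibc 2.34 (e.g. the ubuntu-22.04 image).
      throw new UserError(
        "The CodeQL CLI is incompatible with the version of glibc on your system. " +
          `Please upgrade to CodeQL CLI version ${CODEQL_VERSION_TRACING_GLIBC_2_34} or later, ` +
          'or run the workflow on a runner image such as "ubuntu-20.04" that has glibc 2.33 or earlier.'
      );
    }
    throw e;
  }
}
```

The design point the hunk makes explicit is that the raw tool output now travels on the error object, which is what makes the `__libc_dlopen_mode` classification possible.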
69
lib/codeql.test.js
generated
@@ -81,7 +81,7 @@ ava_1.default.beforeEach(() => {
|
||||
trapCacheDownloadTime: 0,
|
||||
};
|
||||
});
|
||||
async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDir, toolsInput, version, }) {
|
||||
async function mockApiAndSetupCodeQL({ apiDetails, featureEnablement, isPinned, tmpDir, toolsInput, version, }) {
|
||||
var _a;
|
||||
const platform = process.platform === "win32"
|
||||
? "win64"
|
||||
@@ -95,7 +95,7 @@ async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDi
|
||||
(0, nock_1.default)(baseUrl)
|
||||
.get(relativeUrl)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle${isPinned ? "-pinned" : ""}.tar.gz`));
|
||||
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, featureFlags !== null && featureFlags !== void 0 ? featureFlags : (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
return await codeql.setupCodeQL(toolsInput ? toolsInput.input : `${baseUrl}${relativeUrl}`, apiDetails !== null && apiDetails !== void 0 ? apiDetails : sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, featureEnablement !== null && featureEnablement !== void 0 ? featureEnablement : (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
}
|
||||
(0, ava_1.default)("download codeql bundle cache", async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
@@ -138,7 +138,7 @@ async function mockApiAndSetupCodeQL({ apiDetails, featureFlags, isPinned, tmpDi
|
||||
});
|
||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200601"));
|
||||
t.deepEqual(pinnedCodeQLConfig.toolsVersion, "20200601");
|
||||
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
const codeQLConfig = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
t.deepEqual(codeQLConfig.toolsVersion, "0.0.0-20200601");
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
@@ -194,8 +194,8 @@ const TOOLCACHE_BYPASS_TEST_CASES = [
|
||||
false,
|
||||
],
|
||||
];
|
||||
for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
|
||||
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature flag ${isFeatureFlagEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
|
||||
for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCACHE_BYPASS_TEST_CASES) {
|
||||
(0, ava_1.default)(`download codeql bundle ${shouldToolcacheBeBypassed ? "bypasses" : "does not bypass"} toolcache when feature ${isFeatureEnabled ? "enabled" : "disabled"} and tools: ${toolsInput} passed`, async (t) => {
|
||||
await util.withTmpDir(async (tmpDir) => {
|
||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||
await mockApiAndSetupCodeQL({
|
||||
@@ -208,7 +208,7 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
|
||||
await mockApiAndSetupCodeQL({
|
||||
version: defaults.bundleVersion,
|
||||
apiDetails: sampleApiDetails,
|
||||
featureFlags: (0, feature_flags_1.createFeatureFlags)(isFeatureFlagEnabled ? [feature_flags_1.FeatureFlag.BypassToolcacheEnabled] : []),
|
||||
featureEnablement: (0, testing_utils_1.createFeatures)(isFeatureEnabled ? [feature_flags_1.Feature.BypassToolcacheEnabled] : []),
|
||||
toolsInput: { input: toolsInput },
|
||||
tmpDir,
|
||||
});
|
||||
@@ -240,7 +240,7 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
|
||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
||||
.replyWithFile(200, path.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true), false);
|
||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||
t.is(cachedVersions.length, 1);
|
||||
});
|
||||
@@ -332,7 +332,7 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
|
||||
packsInputCombines: false,
|
||||
},
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should NOT have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
@@ -342,35 +342,28 @@ for (const [isFeatureFlagEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOO
|
||||
// Test macro for ensuring different variants of injected augmented configurations
|
||||
const injectedConfigMacro = ava_1.default.macro({
|
||||
exec: async (t, augmentationProperties, configOverride, expectedConfig) => {
|
||||
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "true";
|
||||
try {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
tempDir,
|
||||
augmentationProperties,
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.truthy(configArg, "Should have injected a codescanning config");
|
||||
const configFile = configArg.split("=")[1];
|
||||
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
|
||||
t.deepEqual(augmentedConfig, expectedConfig);
|
||||
await (0, del_1.default)(configFile, { force: true });
|
||||
});
|
||||
}
|
||||
finally {
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||
}
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
const thisStubConfig = {
|
||||
...stubConfig,
|
||||
...configOverride,
|
||||
tempDir,
|
||||
augmentationProperties,
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.truthy(configArg, "Should have injected a codescanning config");
|
||||
const configFile = configArg.split("=")[1];
|
||||
const augmentedConfig = yaml.load(fs.readFileSync(configFile, "utf8"));
|
||||
t.deepEqual(augmentedConfig, expectedConfig);
|
||||
await (0, del_1.default)(configFile, { force: true });
|
||||
});
|
||||
},
|
||||
title: (providedTitle = "") => `databaseInitCluster() injected config: ${providedTitle}`,
|
||||
});
|
||||
@@ -561,7 +554,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_CONFIG_FILES);
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
File diff suppressed because one or more lines are too long
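The test updates above swap the `createFeatureFlags([...])` stubs for a `createFeatures([...])` helper imported from testing-utils. That helper's implementation is not part of this diff; the sketch below shows one plausible shape, assuming a `FeatureEnablement` interface with an async `getValue(feature, codeql?)` method.

```typescript
// Hypothetical sketch of a createFeatures-style test helper; the real helper
// lives in the action's testing-utils module and may differ in detail.
enum Feature {
  BypassToolcacheEnabled = "bypass_toolcache_enabled",
  CliConfigFileEnabled = "cli_config_file_enabled",
  GolangExtractionReconciliationEnabled = "golang_extraction_reconciliation_enabled",
  MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
  TrapCachingEnabled = "trap_caching_enabled",
}

interface FeatureEnablement {
  getValue(feature: Feature, codeql?: unknown): Promise<boolean>;
}

// Reports only the listed features as enabled, ignoring env vars,
// CLI versions and the API entirely.
function createFeatures(enabled: Feature[]): FeatureEnablement {
  return {
    getValue: async (feature) => enabled.includes(feature),
  };
}

// Usage shaped like the tests above (identifiers abbreviated):
// await codeqlObject.databaseInitCluster(
//   config, "", undefined, undefined,
//   createFeatures([Feature.CliConfigFileEnabled]), logger);
```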
33
lib/config-utils.js
generated
@@ -134,7 +134,7 @@ const builtinSuites = ["security-extended", "security-and-quality"];
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureFlags, configFile) {
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureEnablement, configFile) {
|
||||
var _a;
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
@@ -151,8 +151,7 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
languages.includes("javascript") &&
|
||||
(found === "security-extended" || found === "security-and-quality") &&
|
||||
!((_a = packs.javascript) === null || _a === void 0 ? void 0 : _a.some(isMlPoweredJsQueriesPack)) &&
|
||||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled)) &&
|
||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES))) {
|
||||
(await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
@@ -227,7 +226,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureFlags, logger, configFile) {
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
@@ -239,7 +238,7 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureFlags, configFile);
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
@@ -454,13 +453,13 @@ async function getLanguages(codeQL, languagesInput, repository, apiDetails, logg
|
||||
}
|
||||
return parsedLanguages;
|
||||
}
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureFlags, logger) {
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
@@ -478,7 +477,7 @@ function shouldAddConfigFileQueries(queriesInput) {
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, apiDetails, logger);
|
||||
const queries = {};
|
||||
for (const language of languages) {
|
||||
@@ -496,7 +495,7 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
|
||||
: {};
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
||||
return {
|
||||
@@ -532,7 +531,7 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
var _a;
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
@@ -581,7 +580,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
||||
// in the config file.
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureFlags, logger);
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
if (shouldAddConfigFileQueries(rawQueriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
@@ -593,7 +592,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
||||
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueriesMissingUses(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureFlags, logger, configFile);
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
@@ -883,16 +882,16 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
var _a, _b, _c;
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (!configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
}
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
@@ -908,8 +907,8 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
||||
// When using the codescanning config in the CLI, pack downloads
|
||||
// happen in the CLI during the `database init` command, so no need
|
||||
// to download them here.
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureFlags, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureFlags))) {
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
const registries = parseRegistries(registriesInput);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
||||
}
File diff suppressed because one or more lines are too long
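In the config-utils.js hunk above, the ML-powered-queries gate drops its explicit `codeQlVersionAbove(codeQL, CODEQL_VERSION_ML_POWERED_QUERIES)` call and instead passes the CodeQL object to `featureEnablement.getValue(Feature.MlPoweredQueriesEnabled, codeQL)`, relying on the feature's declared `minimumVersion` ("2.7.5" in the feature-flags diff further down). A condensed, self-contained sketch of that check, with simplified stand-in types:

```typescript
// Condensed sketch of the new gate; the types here are simplified stand-ins.
enum Feature {
  MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
}

interface CodeQL {
  getVersion(): Promise<string>;
}

interface FeatureEnablement {
  // When a feature declares a minimumVersion (2.7.5 for ML-powered queries),
  // getValue is expected to check the supplied CLI's version internally.
  getValue(feature: Feature, codeql?: CodeQL): Promise<boolean>;
}

async function shouldInjectMlQueries(
  languages: string[],
  suite: string,
  features: FeatureEnablement,
  codeql: CodeQL
): Promise<boolean> {
  return (
    languages.includes("javascript") &&
    (suite === "security-extended" || suite === "security-and-quality") &&
    // Replaces the previous featureFlags.getValue(...) && codeQlVersionAbove(...)
    // pair: the minimum CLI version now lives alongside the feature definition.
    (await features.getValue(Feature.MlPoweredQueriesEnabled, codeql))
  );
}
```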
60
lib/config-utils.test.js
generated
@@ -94,8 +94,8 @@ function mockListLanguages(languages) {
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger);
|
||||
t.deepEqual(config, await configUtils.getDefaultConfig(languages, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("loading config saves config", async (t) => {
|
||||
@@ -120,7 +120,7 @@ function mockListLanguages(languages) {
|
||||
t.false(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// Sanity check that getConfig returns undefined before we have called initConfig
|
||||
t.deepEqual(await configUtils.getConfig(tmpDir, logger), undefined);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
|
||||
const config1 = await configUtils.initConfig("javascript,python", undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), logger);
|
||||
// The saved config file should now exist
|
||||
t.true(fs.existsSync(configUtils.getPathToParsedConfigFile(tmpDir)));
|
||||
// And that same newly-initialised config should now be returned by getConfig
|
||||
@@ -136,7 +136,7 @@ function mockListLanguages(languages) {
|
||||
(0, ava_1.default)("load input outside of workspace", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, "../input", undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, "../input", undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -149,7 +149,7 @@ function mockListLanguages(languages) {
|
||||
// no filename given, just a repo
|
||||
const configFile = "octo-org/codeql-config@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -163,7 +163,7 @@ function mockListLanguages(languages) {
|
||||
const configFile = "input";
|
||||
t.false(fs.existsSync(path.join(tmpDir, configFile)));
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -239,7 +239,7 @@ function mockListLanguages(languages) {
|
||||
};
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const actualConfig = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "my-artifact", "my-db", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Should exactly equal the object we constructed earlier
|
||||
t.deepEqual(actualConfig, expectedConfig);
|
||||
});
|
||||
@@ -278,7 +278,7 @@ function mockListLanguages(languages) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript";
|
||||
const configFilePath = createConfigFile(inputFileContents, tmpDir);
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolve queries was called correctly
|
||||
t.deepEqual(resolveQueriesArgs.length, 1);
|
||||
t.deepEqual(resolveQueriesArgs[0].queries, [
|
||||
@@ -324,7 +324,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, undefined, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries
|
||||
// and once for `./foo` from the config file.
|
||||
@@ -360,7 +360,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries and once for `./override`,
|
||||
// but won't be called for './foo' from the config file.
|
||||
@@ -395,7 +395,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for `./workflow-query`,
|
||||
// but won't be called for the default one since that was disabled
|
||||
@@ -424,7 +424,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly:
|
||||
// It'll be called once for the default queries,
|
||||
// and then once for each of the two queries from the workflow
|
||||
@@ -466,7 +466,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
const languages = "javascript";
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const config = await configUtils.initConfig(languages, testQueries, undefined, undefined, configFilePath, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
// Check resolveQueries was called correctly
|
||||
// It'll be called once for the default queries,
|
||||
// once for each of additional1 and additional2,
|
||||
@@ -508,7 +508,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, queries, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.fail("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -554,7 +554,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.mkdirSync(path.join(tmpDir, "foo/bar/dev"), { recursive: true });
|
||||
const configFile = "octo-org/codeql-config/config.yaml@main";
|
||||
const languages = "javascript";
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.assert(spyGetContents.called);
|
||||
});
|
||||
});
|
||||
@@ -564,7 +564,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -580,7 +580,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
mockGetContents(dummyResponse);
|
||||
const repoReference = "octo-org/codeql-config/config.yaml@main";
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, repoReference, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -600,7 +600,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
},
|
||||
});
|
||||
try {
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(undefined, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -612,7 +612,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const languages = "rubbish,english";
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -643,7 +643,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
const configFile = path.join(tmpDir, "codeql-config.yaml");
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
const languages = "javascript";
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
});
|
||||
@@ -680,7 +680,7 @@ function queriesToResolvedQueryForm(queries) {
|
||||
fs.writeFileSync(configFile, inputFileContents);
|
||||
fs.mkdirSync(path.join(tmpDir, "foo"));
|
||||
const languages = "javascript,python,cpp";
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs, queries } = await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: ["a/b@1.2.3"],
|
||||
[languages_1.Language.python]: ["c/d@1.2.3"],
|
||||
@@ -726,7 +726,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
|
||||
const inputFile = path.join(tmpDir, configFile);
|
||||
fs.writeFileSync(inputFile, inputFileContents, "utf8");
|
||||
try {
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await configUtils.initConfig(languages, undefined, undefined, undefined, configFile, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
throw new Error("initConfig did not throw error");
|
||||
}
|
||||
catch (err) {
|
||||
@@ -968,7 +968,7 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
|
||||
(0, ava_1.default)("input with + only", parseInputAndConfigErrorMacro, {}, " + ", [languages_1.Language.cpp], true, /remove the '\+'/);
|
||||
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], false, /"xxx" is not a valid pack/);
|
||||
const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
exec: async (t, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => {
|
||||
exec: async (t, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
async getVersion() {
|
||||
@@ -987,9 +987,7 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
return { packs: [] };
|
||||
},
|
||||
});
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, feature_flags_1.createFeatureFlags)(isMlPoweredQueriesFlagEnabled
|
||||
? [feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled]
|
||||
: []), (0, logging_1.getRunnerLogger)(true));
|
||||
const { packs } = await configUtils.initConfig("javascript", queriesInput, packsInput, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example " }, tmpDir, codeQL, tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)(isMlPoweredQueriesEnabled ? [feature_flags_1.Feature.MlPoweredQueriesEnabled] : []), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectedVersionString !== undefined) {
|
||||
t.deepEqual(packs, {
|
||||
[languages_1.Language.javascript]: [
|
||||
@@ -1002,14 +1000,12 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
||||
}
|
||||
});
|
||||
},
|
||||
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
|
||||
title: (_providedTitle, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString) => `ML-powered queries ${expectedVersionString !== undefined
|
||||
? `${expectedVersionString} are`
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature flag is ${isMlPoweredQueriesFlagEnabled ? "enabled" : "disabled"}`,
|
||||
: "aren't"} loaded for packs: ${packsInput}, queries: ${queriesInput} using CLI v${codeQLVersion} when feature is ${isMlPoweredQueriesEnabled ? "enabled" : "disabled"}`,
|
||||
});
|
||||
// macro, codeQLVersion, isMlPoweredQueriesFlagEnabled, packsInput, queriesInput, expectedVersionString
|
||||
// Test that ML-powered queries aren't run on v2.7.4 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.4", true, undefined, "security-extended", undefined);
|
||||
// Test that ML-powered queries aren't run when the feature flag is off.
|
||||
// macro, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString
|
||||
// Test that ML-powered queries aren't run when the feature is off.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
|
||||
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
|
||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
File diff suppressed because one or more lines are too long
@@ -1,3 +1,3 @@
{
"bundleVersion": "codeql-bundle-20220923"
"bundleVersion": "codeql-bundle-20221024"
}
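The hunk above bumps the default bundle from codeql-bundle-20220923 to codeql-bundle-20221024. For context, the GHES test earlier in this diff builds a releases/download URL from `defaults.bundleVersion`; the snippet below is an illustrative reconstruction of that kind of URL, and the platform-to-bundle-name mapping is an assumption for the example rather than code taken from the action.

```typescript
// Illustrative only: how a bundleVersion like the one bumped above can be
// combined into a bundle download URL. The naming convention is assumed.
const defaults = { bundleVersion: "codeql-bundle-20221024" };

function codeQLBundleName(platform: NodeJS.Platform): string {
  // Assumed platform-to-bundle mapping (the test above maps win32 -> win64).
  const os =
    platform === "win32" ? "win64" : platform === "darwin" ? "osx64" : "linux64";
  return `codeql-bundle-${os}.tar.gz`;
}

const downloadUrl =
  "https://github.com/github/codeql-action/releases/download/" +
  `${defaults.bundleVersion}/${codeQLBundleName(process.platform)}`;

console.log(downloadUrl);
```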
183
lib/feature-flags.js
generated
@@ -19,89 +19,144 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createFeatureFlags = exports.GitHubFeatureFlags = exports.FeatureFlag = void 0;
|
||||
exports.Features = exports.featureConfig = exports.Feature = void 0;
|
||||
const api_client_1 = require("./api-client");
|
||||
const util = __importStar(require("./util"));
|
||||
var FeatureFlag;
|
||||
(function (FeatureFlag) {
|
||||
FeatureFlag["BypassToolcacheEnabled"] = "bypass_toolcache_enabled";
|
||||
FeatureFlag["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
FeatureFlag["TrapCachingEnabled"] = "trap_caching_enabled";
|
||||
FeatureFlag["GolangExtractionReconciliationEnabled"] = "golang_extraction_reconciliation_enabled";
|
||||
FeatureFlag["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||
})(FeatureFlag = exports.FeatureFlag || (exports.FeatureFlag = {}));
|
||||
var Feature;
|
||||
(function (Feature) {
|
||||
Feature["BypassToolcacheEnabled"] = "bypass_toolcache_enabled";
|
||||
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||
Feature["GolangExtractionReconciliationEnabled"] = "golang_extraction_reconciliation_enabled";
|
||||
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
||||
})(Feature = exports.Feature || (exports.Feature = {}));
|
||||
exports.featureConfig = {
|
||||
[Feature.BypassToolcacheEnabled]: {
|
||||
envVar: "CODEQL_BYPASS_TOOLCACHE",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.CliConfigFileEnabled]: {
|
||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||
minimumVersion: "2.11.1",
|
||||
},
|
||||
[Feature.GolangExtractionReconciliationEnabled]: {
|
||||
envVar: "CODEQL_GOLANG_EXTRACTION_RECONCILIATION",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.MlPoweredQueriesEnabled]: {
|
||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||
minimumVersion: "2.7.5",
|
||||
},
|
||||
[Feature.TrapCachingEnabled]: {
|
||||
envVar: "CODEQL_TRAP_CACHING",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
};
|
||||
/**
|
||||
* Determines the enablement status of a number of features.
|
||||
* If feature enablement is not able to be determined locally, a request to the
|
||||
* GitHub API is made to determine the enablement status.
|
||||
*/
|
||||
class Features {
|
||||
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
|
||||
this.gitHubFeatureFlags = new GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @param feature The feature to check.
|
||||
* @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the
|
||||
* feature, the version of the CodeQL CLI will be checked against the minimum version.
|
||||
* If the version is less than the minimum version, the feature will be considered
|
||||
* disabled. If not provided, and a `minimumVersion` is specified for the feature, the
|
||||
* this function will throw.
|
||||
* @returns true if the feature is enabled, false otherwise.
|
||||
*
|
||||
* @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided.
|
||||
*/
|
||||
async getValue(feature, codeql) {
if (!codeql && exports.featureConfig[feature].minimumVersion) {
throw new Error(`Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`);
}
// Bypassing the toolcache is disabled in test mode.
if (feature === Feature.BypassToolcacheEnabled && util.isInTestMode()) {
return false;
}
const envVar = (process.env[exports.featureConfig[feature].envVar] || "").toLocaleLowerCase();
// Do not use this feature if user explicitly disables it via an environment variable.
if (envVar === "false") {
return false;
}
// Never use this feature if the CLI version explicitly can't support it.
const minimumVersion = exports.featureConfig[feature].minimumVersion;
if (codeql && minimumVersion) {
if (!(await util.codeQlVersionAbove(codeql, minimumVersion))) {
return false;
}
}
// Use this feature if user explicitly enables it via an environment variable.
if (envVar === "true") {
return true;
}
// Ask the GitHub API if the feature is enabled.
return await this.gitHubFeatureFlags.getValue(feature);
}
}
exports.Features = Features;
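The precedence encoded in `Features.getValue` above is worth spelling out: an explicit "false" in the feature's environment variable always disables it, a CodeQL CLI older than the feature's `minimumVersion` disables it next, an explicit "true" then enables it, and only if none of those apply is the GitHub API consulted. A simplified, self-contained TypeScript sketch of that ordering (the names here are illustrative, not the action's real types):

type FeatureSpec = { envVar: string; minimumVersion?: string };

// Sketch of the ordering only; `cliVersionOk` stands in for the
// codeQlVersionAbove(codeql, minimumVersion) check made above.
async function isEnabled(
    spec: FeatureSpec,
    cliVersionOk: boolean,
    askApi: () => Promise<boolean>
): Promise<boolean> {
    const envVar = (process.env[spec.envVar] || "").toLocaleLowerCase();
    if (envVar === "false") return false;                    // explicit opt-out wins
    if (spec.minimumVersion && !cliVersionOk) return false;  // CLI too old
    if (envVar === "true") return true;                      // explicit opt-in
    return await askApi();                                   // otherwise ask the API
}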
class GitHubFeatureFlags {
constructor(gitHubVersion, apiDetails, repositoryNwo, logger) {
this.gitHubVersion = gitHubVersion;
this.apiDetails = apiDetails;
this.repositoryNwo = repositoryNwo;
this.logger = logger;
/**/
}
async getValue(flag) {
// Bypassing the toolcache is disabled in test mode.
if (flag === FeatureFlag.BypassToolcacheEnabled && util.isInTestMode()) {
return false;
}
async getValue(feature) {
const response = await this.getApiResponse();
if (response === undefined) {
this.logger.debug(`No feature flags API response for ${flag}, considering it disabled.`);
this.logger.debug(`No feature flags API response for ${feature}, considering it disabled.`);
return false;
}
const flagValue = response[flag];
if (flagValue === undefined) {
this.logger.debug(`Feature flag '${flag}' undefined in API response, considering it disabled.`);
const featureEnablement = response[feature];
if (featureEnablement === undefined) {
this.logger.debug(`Feature '${feature}' undefined in API response, considering it disabled.`);
return false;
}
return flagValue;
return !!featureEnablement;
}
async getApiResponse() {
const loadApiResponse = async () => {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all feature flags.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
}
else {
// Some feature flags, such as `ml_powered_queries_enabled` affect the produced alerts.
// Considering these feature flags disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature flags.
throw new Error(`Encountered an error while trying to load feature flags: ${e}`);
}
}
};
const apiResponse = this.cachedApiResponse || (await loadApiResponse());
const apiResponse = this.cachedApiResponse || (await this.loadApiResponse());
this.cachedApiResponse = apiResponse;
return apiResponse;
}
async loadApiResponse() {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all toggleable features.");
return {};
}
const client = (0, api_client_1.getApiClient)(this.apiDetails);
try {
const response = await client.request("GET /repos/:owner/:repo/code-scanning/codeql-action/features", {
owner: this.repositoryNwo.owner,
repo: this.repositoryNwo.repo,
});
return response.data;
}
catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
}
else {
// Some features, such as `ml_powered_queries_enabled` affect the produced alerts.
// Considering these features disabled in the event of a transient error could
// therefore lead to alert churn. As a result, we crash if we cannot determine the value of
// the feature.
throw new Error(`Encountered an error while trying to determine feature enablement: ${e}`);
}
}
}
}
exports.GitHubFeatureFlags = GitHubFeatureFlags;
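The exact JSON returned by the `/repos/:owner/:repo/code-scanning/codeql-action/features` endpoint is not shown in this diff; the indexing `response[feature]` and the `!!featureEnablement` coercion above imply a flat map of feature names to booleans. A hedged TypeScript sketch of that assumption, including the one-request-per-run memoization that `cachedApiResponse` provides:

// Assumed response shape (not taken from published API docs):
type FeaturesApiResponse = { [featureName: string]: boolean };

// Mirrors getApiResponse(): fetch at most once, then reuse the cached value.
let cachedApiResponse: FeaturesApiResponse | undefined;
async function getApiResponseOnce(
    load: () => Promise<FeaturesApiResponse>
): Promise<FeaturesApiResponse> {
    cachedApiResponse = cachedApiResponse ?? (await load());
    return cachedApiResponse;
}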
/**
 * Create a feature flags instance with the specified set of enabled flags.
 *
 * This should be only used within tests.
 */
function createFeatureFlags(enabledFlags) {
return {
getValue: async (flag) => {
return enabledFlags.includes(flag);
},
};
}
exports.createFeatureFlags = createFeatureFlags;
//# sourceMappingURL=feature-flags.js.map
lib/feature-flags.test.js (generated, 171 lines changed)
@@ -19,87 +19,168 @@ const testApiDetails = {
apiURL: undefined,
};
const testRepositoryNwo = (0, repository_1.parseRepositoryNwo)("github/example");
const ALL_FEATURE_FLAGS_DISABLED_VARIANTS = [
const ALL_FEATURES_DISABLED_VARIANTS = [
{
description: "GHES",
gitHubVersion: { type: util_1.GitHubVariant.GHES, version: "3.0.0" },
},
{ description: "GHAE", gitHubVersion: { type: util_1.GitHubVariant.GHAE } },
];
for (const variant of ALL_FEATURE_FLAGS_DISABLED_VARIANTS) {
(0, ava_1.default)(`All feature flags are disabled if running against ${variant.description}`, async (t) => {
for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
(0, ava_1.default)(`All features are disabled if running against ${variant.description}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags(variant.gitHubVersion, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
const featureEnablement = setUpTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
for (const feature of Object.values(feature_flags_1.Feature)) {
t.false(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
}
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all feature flags.") !== undefined);
"Not running against github.com. Disabling all toggleable features.") !== undefined);
});
});
}
(0, ava_1.default)("API response missing", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
const featureEnablement = setUpTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of ["ml_powered_queries_enabled"]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`No feature flags API response for ${featureFlag}, considering it disabled.`) !== undefined);
for (const feature of Object.values(feature_flags_1.Feature)) {
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
}
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
});
(0, ava_1.default)("Feature flags are disabled if they're not returned in API response", async (t) => {
(0, ava_1.default)("Features are disabled if they're not returned in API response", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const loggedMessages = [];
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
const featureEnablement = setUpTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
for (const flag of Object.values(feature_flags_1.FeatureFlag)) {
t.assert((await featureFlags.getValue(flag)) === false);
}
for (const featureFlag of ["ml_powered_queries_enabled"]) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message ===
`Feature flag '${featureFlag}' undefined in API response, considering it disabled.`) !== undefined);
for (const feature of Object.values(feature_flags_1.Feature)) {
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
}
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
});
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const featureEnablement = setUpTests(tmpDir);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
await t.throwsAsync(async () => featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled), {
message: "Encountered an error while trying to load feature flags: Error: some error message",
await t.throwsAsync(async () => featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.MlPoweredQueriesEnabled)), {
message: "Encountered an error while trying to determine feature enablement: Error: some error message",
});
});
});
const FEATURE_FLAGS = ["ml_powered_queries_enabled"];
for (const featureFlag of FEATURE_FLAGS) {
(0, ava_1.default)(`Feature flag '${featureFlag}' is enabled if enabled in the API response`, async (t) => {
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
(0, ava_1.default)(`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
const featureFlags = new feature_flags_1.GitHubFeatureFlags({ type: util_1.GitHubVariant.DOTCOM }, testApiDetails, testRepositoryNwo, (0, logging_1.getRunnerLogger)(true));
const expectedFeatureFlags = {};
for (const f of FEATURE_FLAGS) {
expectedFeatureFlags[f] = false;
const featureEnablement = setUpTests(tmpDir);
// set all features to false except the one we're testing
const expectedFeatureEnablement = {};
for (const f of Object.keys(feature_flags_1.featureConfig)) {
expectedFeatureEnablement[f] = f === feature;
}
expectedFeatureFlags[featureFlag] = true;
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureFlags);
const actualFeatureFlags = {
ml_powered_queries_enabled: await featureFlags.getValue(feature_flags_1.FeatureFlag.MlPoweredQueriesEnabled),
};
t.deepEqual(actualFeatureFlags, expectedFeatureFlags);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
// retrieve the values of the actual features
const actualFeatureEnablement = {};
for (const f of Object.keys(feature_flags_1.featureConfig)) {
actualFeatureEnablement[f] = await featureEnablement.getValue(f, includeCodeQlIfRequired(f));
}
// All features should be false except the one we're testing
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
});
});
(0, ava_1.default)(`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
const featureEnablement = setUpTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(false);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
// feature should be disabled initially
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
// set env var to true and check that the feature is now enabled
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
});
});
(0, ava_1.default)(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
const featureEnablement = setUpTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
// feature should be enabled initially
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
// set env var to false and check that the feature is now disabled
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
});
});
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
(0, ava_1.default)(`Getting feature '${feature}' should throw if no codeql is provided`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
const featureEnablement = setUpTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
await t.throwsAsync(async () => featureEnablement.getValue(feature), {
message: `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`,
});
});
});
}
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
(0, ava_1.default)(`Feature '${feature}' is disabled if the minimum CLI version is below ${feature_flags_1.featureConfig[feature].minimumVersion}`, async (t) => {
await (0, util_1.withTmpDir)(async (tmpDir) => {
const featureEnablement = setUpTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
// feature should be disabled when an old CLI version is set
let codeql = (0, testing_utils_1.mockCodeQLVersion)("2.0.0");
t.assert(!(await featureEnablement.getValue(feature, codeql)));
// even setting the env var to true should not enable the feature if
// the minimum CLI version is not met
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
t.assert(!(await featureEnablement.getValue(feature, codeql)));
// feature should be enabled when a new CLI version is set
// and env var is not set
process.env[feature_flags_1.featureConfig[feature].envVar] = "";
codeql = (0, testing_utils_1.mockCodeQLVersion)(feature_flags_1.featureConfig[feature].minimumVersion);
t.assert(await featureEnablement.getValue(feature, codeql));
// set env var to false and check that the feature is now disabled
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
t.assert(!(await featureEnablement.getValue(feature, codeql)));
});
});
}
}
// If we ever run into a situation where we no longer have any features that
// specify a minimum version, then we will have a bunch of code no longer being
// tested. This is unlikely, and this test will fail if that happens.
// If we do end up in that situation, then we should consider adding a synthetic
// feature with a minimum version that is only used for tests.
(0, ava_1.default)("At least one feature has a minimum version specified", (t) => {
t.assert(Object.values(feature_flags_1.featureConfig).some((f) => f.minimumVersion !== undefined), "At least one feature should have a minimum version specified");
// An even less likely scenario is that we no longer have any features.
t.assert(Object.values(feature_flags_1.featureConfig).length > 0, "There should be at least one feature");
});
function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
v.message.includes(feature) &&
v.message.includes("considering it disabled")) !== undefined);
}
}
function initializeFeatures(initialValue) {
return Object.keys(feature_flags_1.featureConfig).reduce((features, key) => {
features[key] = initialValue;
return features;
}, {});
}
function setUpTests(tmpDir, logger = (0, logging_1.getRunnerLogger)(true), gitHubVersion = { type: util_1.GitHubVariant.DOTCOM }) {
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
return new feature_flags_1.Features(gitHubVersion, testApiDetails, testRepositoryNwo, logger);
}
function includeCodeQlIfRequired(feature) {
return feature_flags_1.featureConfig[feature].minimumVersion !== undefined
? (0, testing_utils_1.mockCodeQLVersion)("9.9.9")
: undefined;
}
//# sourceMappingURL=feature-flags.test.js.map
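The local helpers defined at the end of this file (`setUpTests`, `initializeFeatures`, `includeCodeQlIfRequired`) are what keep the individual tests above short. A hypothetical extra test written against the same helpers might look like this in TypeScript source form (imports and helper names are taken from this diff; the test itself is illustrative only):

import test from "ava";

import { Feature } from "./feature-flags";
import { mockFeatureFlagApiEndpoint } from "./testing-utils";
import { withTmpDir } from "./util";

test("TRAP caching follows the API response when nothing else applies", async (t) => {
    await withTmpDir(async (tmpDir) => {
        // setUpTests, initializeFeatures and includeCodeQlIfRequired are the
        // file-local helpers shown above.
        const features = setUpTests(tmpDir);
        const enablement = initializeFeatures(false);
        enablement[Feature.TrapCachingEnabled] = true;
        mockFeatureFlagApiEndpoint(200, enablement);
        t.true(await features.getValue(
            Feature.TrapCachingEnabled,
            includeCodeQlIfRequired(Feature.TrapCachingEnabled)
        ));
    });
});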
File diff suppressed because one or more lines are too long
lib/init-action.js (generated, 19 lines changed)
@@ -88,22 +88,22 @@ async function run() {
const gitHubVersion = await (0, api_client_1.getGitHubVersionActionsOnly)();
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger, util_1.Mode.actions);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
const featureFlags = new feature_flags_1.GitHubFeatureFlags(gitHubVersion, apiDetails, repositoryNwo, logger);
const features = new feature_flags_1.Features(gitHubVersion, apiDetails, repositoryNwo, logger);
try {
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
return;
}
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, featureFlags, logger);
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, features, logger);
codeql = initCodeQLResult.codeql;
toolsVersion = initCodeQLResult.toolsVersion;
await (0, util_1.enrichEnvironment)(util_1.Mode.actions, codeql);
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("registries"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), await getTrapCachingEnabled(featureFlags),
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("registries"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), await getTrapCachingEnabled(features),
// Debug mode is enabled if:
// - The `init` Action is passed `debug: true`.
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
// or by setting the `ACTIONS_STEP_DEBUG` secret to `true`).
(0, actions_util_1.getOptionalInput)("debug") === "true" || core.isDebug(), (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, featureFlags, logger);
(0, actions_util_1.getOptionalInput)("debug") === "true" || core.isDebug(), (0, actions_util_1.getOptionalInput)("debug-artifact-name") || util_1.DEFAULT_DEBUG_ARTIFACT_NAME, (0, actions_util_1.getOptionalInput)("debug-database-name") || util_1.DEFAULT_DEBUG_DATABASE_NAME, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), codeql, (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), gitHubVersion, apiDetails, features, logger);
if (config.languages.includes(languages_1.Language.python) &&
(0, actions_util_1.getRequiredInput)("setup-python-dependencies") === "true") {
try {
@@ -138,7 +138,7 @@ async function run() {
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined, featureFlags, logger);
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", undefined, features, logger);
if (tracerConfig !== undefined) {
for (const [key, value] of Object.entries(tracerConfig.env)) {
core.exportVariable(key, value);
@@ -158,11 +158,16 @@ async function run() {
}
await sendSuccessStatusReport(startedAt, config, toolsVersion, logger);
}
async function getTrapCachingEnabled(featureFlags) {
async function getTrapCachingEnabled(featureEnablement) {
// If the workflow specified something, always respect that
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
if (trapCaching !== undefined)
return trapCaching === "true";
return await featureFlags.getValue(feature_flags_1.FeatureFlag.TrapCachingEnabled);
// On self-hosted runners which may have slow network access, disable TRAP caching by default
if (!(0, util_1.isHostedRunner)())
return false;
// On hosted runners, respect the feature flag
return await featureEnablement.getValue(feature_flags_1.Feature.TrapCachingEnabled);
}
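The reworked getTrapCachingEnabled above now decides in three tiers: an explicit `trap-caching` workflow input always wins, self-hosted runners (which may have slow network access) default to off, and hosted runners defer to the `trap_caching_enabled` feature. The same ordering as a standalone TypeScript sketch (parameter names are illustrative, not the action's real signature):

async function trapCachingEnabled(
    workflowInput: string | undefined,                        // getOptionalInput("trap-caching")
    isHostedRunner: boolean,                                   // util.isHostedRunner()
    features: { getValue(feature: string): Promise<boolean> }  // the Features instance
): Promise<boolean> {
    if (workflowInput !== undefined) return workflowInput === "true"; // explicit choice wins
    if (!isHostedRunner) return false;                         // default off where downloads may be slow
    return await features.getValue("trap_caching_enabled");   // otherwise follow the feature
}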
async function runWrapper() {
try {
File diff suppressed because one or more lines are too long
lib/init.js (generated, 14 lines changed)
@@ -30,28 +30,28 @@ const configUtils = __importStar(require("./config-utils"));
const tracer_config_1 = require("./tracer-config");
const util = __importStar(require("./util"));
const util_1 = require("./util");
async function initCodeQL(codeqlURL, apiDetails, tempDir, variant, featureFlags, logger) {
async function initCodeQL(codeqlURL, apiDetails, tempDir, variant, featureEnablement, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, variant, featureFlags, logger, true);
const { codeql, toolsVersion } = await (0, codeql_1.setupCodeQL)(codeqlURL, apiDetails, tempDir, variant, featureEnablement, logger, true);
await codeql.printVersion();
logger.endGroup();
return { codeql, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureFlags, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config, sourceRoot, processName, processLevel, featureFlags, logger) {
async function runInit(codeql, config, sourceRoot, processName, processLevel, featureEnablement, logger) {
fs.mkdirSync(config.dbLocation, { recursive: true });
try {
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Init a database cluster
await codeql.databaseInitCluster(config, sourceRoot, processName, processLevel, featureFlags, logger);
await codeql.databaseInitCluster(config, sourceRoot, processName, processLevel, featureEnablement, logger);
}
else {
for (const language of config.languages) {
@@ -63,7 +63,7 @@ async function runInit(codeql, config, sourceRoot, processName, processLevel, fe
catch (e) {
throw processError(e);
}
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql, await util.isGoExtractionReconciliationEnabled(featureFlags), logger);
return await (0, tracer_config_1.getCombinedTracerConfig)(config, codeql, await util.isGoExtractionReconciliationEnabled(featureEnablement), logger);
}
exports.runInit = runInit;
/**
lib/runner.js (generated, 14 lines changed)
@@ -30,11 +30,11 @@ const analyze_1 = require("./analyze");
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const config_utils_1 = require("./config-utils");
const feature_flags_1 = require("./feature-flags");
const init_1 = require("./init");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const upload_lib = __importStar(require("./upload-lib"));
const util_1 = require("./util");
// eslint-disable-next-line import/no-commonjs
@@ -152,14 +152,14 @@ program
codeql = await (0, codeql_1.getCodeQL)(cmd.codeqlPath);
}
else {
codeql = (await (0, init_1.initCodeQL)(undefined, apiDetails, tempDir, gitHubVersion.type, (0, feature_flags_1.createFeatureFlags)([]), logger)).codeql;
codeql = (await (0, init_1.initCodeQL)(undefined, apiDetails, tempDir, gitHubVersion.type, (0, testing_utils_1.createFeatures)([]), logger)).codeql;
}
await (0, util_1.enrichEnvironment)(util_1.Mode.runner, codeql);
const workspacePath = checkoutPath;
const config = await (0, init_1.initConfig)(cmd.languages, cmd.queries, cmd.packs, undefined, // we won't support registries in the runner
cmd.configFile, undefined, false, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, feature_flags_1.createFeatureFlags)([]), logger);
cmd.configFile, undefined, false, false, "", "", (0, repository_1.parseRepositoryNwo)(cmd.repository), tempDir, codeql, workspacePath, gitHubVersion, apiDetails, (0, testing_utils_1.createFeatures)([]), logger);
const sourceRoot = checkoutPath;
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel(), (0, feature_flags_1.createFeatureFlags)([]), logger);
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, parseTraceProcessName(), parseTraceProcessLevel(), (0, testing_utils_1.createFeatures)([]), logger);
if (tracerConfig === undefined) {
return;
}
@@ -231,7 +231,7 @@ program
languages = [language];
}
else {
languages = await (0, autobuild_1.determineAutobuildLanguages)(config, (0, feature_flags_1.createFeatureFlags)([]), logger);
languages = await (0, autobuild_1.determineAutobuildLanguages)(config, (0, testing_utils_1.createFeatures)([]), logger);
}
if (languages !== undefined) {
for (const language of languages) {
@@ -294,8 +294,8 @@ program
}
const threads = (0, util_1.getThreadsFlag)(cmd.threads || initEnv["CODEQL_THREADS"], logger);
const memory = (0, util_1.getMemoryFlag)(cmd.ram || initEnv["CODEQL_RAM"]);
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, (0, feature_flags_1.createFeatureFlags)([]));
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger, (0, feature_flags_1.createFeatureFlags)([]));
await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, (0, testing_utils_1.createFeatures)([]));
await (0, analyze_1.runQueries)(outputDir, memory, (0, util_1.getAddSnippetsFlag)(cmd.addSnippets), threads, cmd.category, config, logger, (0, testing_utils_1.createFeatures)([]));
if (!cmd.upload) {
logger.info("Not uploading results");
return;
File diff suppressed because one or more lines are too long
lib/shared-environment.js (generated, 3 lines changed)
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// The time at which the first action (normally init) started executing.
// If a workflow invokes a different action without first invoking the init
@@ -8,4 +8,5 @@ exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
//# sourceMappingURL=shared-environment.js.map
lib/testing-utils.js (generated, 23 lines changed)
@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
exports.createFeatures = exports.mockCodeQLVersion = exports.mockFeatureFlagApiEndpoint = exports.getRecordingLogger = exports.setupActionsVars = exports.setupTests = void 0;
const github = __importStar(require("@actions/github"));
const sinon = __importStar(require("sinon"));
const apiClient = __importStar(require("./api-client"));
@@ -144,4 +144,25 @@ function mockFeatureFlagApiEndpoint(responseStatusCode, response) {
sinon.stub(apiClient, "getApiClient").value(() => client);
}
exports.mockFeatureFlagApiEndpoint = mockFeatureFlagApiEndpoint;
function mockCodeQLVersion(version) {
return {
async getVersion() {
return version;
},
};
}
exports.mockCodeQLVersion = mockCodeQLVersion;
/**
 * Create a feature enablement instance with the specified set of enabled features.
 *
 * This should be only used within tests.
 */
function createFeatures(enabledFeatures) {
return {
getValue: async (feature) => {
return enabledFeatures.includes(feature);
},
};
}
exports.createFeatures = createFeatures;
//# sourceMappingURL=testing-utils.js.map
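createFeatures and mockCodeQLVersion are the test-side replacements for the old createFeatureFlags helper (the runner entry point in lib/runner.js above switches to them as well). A hypothetical caller, assuming the exports added in this hunk:

import { Feature } from "./feature-flags";
import { createFeatures, mockCodeQLVersion } from "./testing-utils";

// Pretend exactly one feature is enabled; the stub ignores env vars,
// minimum versions and the GitHub API entirely.
const features = createFeatures([Feature.TrapCachingEnabled]);
const codeql = mockCodeQLVersion("2.11.1");

async function demo(): Promise<void> {
    console.log(await features.getValue(Feature.TrapCachingEnabled));      // true
    console.log(await features.getValue(Feature.MlPoweredQueriesEnabled)); // false
    console.log(await codeql.getVersion());                                // "2.11.1"
}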
@@ -1 +1 @@
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAEnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,2EAA2E;QAC3E,2EAA2E;QAC3E,yCAAyC;QACzC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAC9C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAClC,CAAC;QACF,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QAED,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAjDD,gCAiDC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MA
AM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC"}
|
||||
{"version":3,"file":"testing-utils.js","sourceRoot":"","sources":["../src/testing-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,wDAA0C;AAE1C,6CAA+B;AAE/B,wDAA0C;AAC1C,iDAAmC;AAGnC,iCAAmC;AASnC,SAAS,UAAU,CAAC,OAAoB;IACtC,8CAA8C;IAC9C,gCAAgC;IAChC,2EAA2E;IAC3E,2FAA2F;IAC3F,OAAO,CACL,KAA0B,EAC1B,QAAiB,EACjB,EAA0B,EACjB,EAAE;QACX,2CAA2C;QAC3C,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;YACtD,EAAE,GAAG,QAAQ,CAAC;YACd,QAAQ,GAAG,SAAS,CAAC;SACtB;QAED,oBAAoB;QACpB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,OAAO,CAAC,UAAU,IAAI,KAAK,CAAC;SAC7B;aAAM;YACL,OAAO,CAAC,UAAU,IAAI,IAAI,WAAW,CAAC,QAAQ,IAAI,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC1E;QAED,iDAAiD;QACjD,IAAI,EAAE,KAAK,SAAS,IAAI,OAAO,EAAE,KAAK,UAAU,EAAE;YAChD,EAAE,EAAE,CAAC;SACN;QAED,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAED,SAAgB,UAAU,CAAC,IAAiB;IAC1C,MAAM,SAAS,GAAG,IAA2B,CAAC;IAE9C,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,EAAE;QACzB,gEAAgE;QAChE,0CAA0C;QAC1C,MAAM,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;QAErB,iEAAiE;QACjE,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;QAC1B,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QACpD,MAAM,kBAAkB,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;QACrE,CAAC,CAAC,OAAO,CAAC,WAAW,GAAG,kBAAkB,CAAC;QAC3C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAQ,CAAC;QAEpD,2EAA2E;QAC3E,2EAA2E;QAC3E,yCAAyC;QACzC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,CAC9C,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,MAAM,CAClC,CAAC;QACF,IAAI,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;YACvB,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QAED,mEAAmE;QACnE,wEAAwE;QACxE,kEAAkE;QAClE,CAAC,CAAC,OAAO,CAAC,GAAG,GAAG,EAAE,CAAC;QACnB,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;IAC5C,CAAC,CAAC,CAAC;IAEH,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE;QAC/B,4BAA4B;QAC5B,0DAA0D;QAC1D,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,OAAO,CAAC,MAAM,CAAC,KAAK,GAAG,CAAC,CAAC,OAAO,CAAC,WAAW,CAAC;QAC7C,IAAI,CAAC,CAAC,CAAC,MAAM,EAAE;YACb,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;SAC5C;QAED,uCAAuC;QACvC,KAAK,CAAC,OAAO,EAAE,CAAC;QAEhB,oCAAoC;QACpC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAjDD,gCAiDC;AAED,yEAAyE;AACzE,sDAAsD;AACtD,SAAgB,gBAAgB,CAAC,OAAe,EAAE,QAAgB;IAChE,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,OAAO,CAAC;IACrC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,GAAG,QAAQ,CAAC;IAC5C,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,OAAO,CAAC;AAC5C,CAAC;AAJD,4CAIC;AAOD,SAAgB,kBAAkB,CAAC,QAAyB;IAC1D,OAAO;QACL,KAAK,EAAE,CAAC,OAAe,EAAE,EAAE;YACzB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,IAAI,EAAE,CAAC,OAAe,EAAE,EAAE;YACxB,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;YACzC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,OAAO,EAAE,CAAC,OAAuB,EAAE,EAAE;YACnC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,SAAS,EAAE,OAAO,EAAE,CAAC,CAAC;YAC5C,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACxB,CAAC;QACD,KAAK,EAAE,CAAC,OAAuB,EAAE,EAAE;YACjC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC;YAC1C,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACzB,CAAC;QACD,OAAO,EAAE,GAAG,EAAE,CAAC,IAAI;QACnB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAtBD,gDAsBC;AAED,0EAA0E;AAC1E,SAAgB,0BAA0B,CACxC,kBAA0B,EAC1B,QAAyC;IAEzC,kEAAkE;IAClE,MA
AM,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IAExC,MAAM,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IAEjD,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAClC,8DAA8D,CAC/D,CAAC;IACF,IAAI,kBAAkB,GAAG,GAAG,EAAE;QAC5B,QAAQ,CAAC,QAAQ,CAAC;YAChB,MAAM,EAAE,kBAAkB;YAC1B,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,8DAA8D;SACpE,CAAC,CAAC;KACJ;SAAM;QACL,QAAQ,CAAC,MAAM,CAAC,IAAI,gBAAS,CAAC,oBAAoB,EAAE,kBAAkB,CAAC,CAAC,CAAC;KAC1E;IAED,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;AAC5D,CAAC;AAxBD,gEAwBC;AAED,SAAgB,iBAAiB,CAAC,OAAO;IACvC,OAAO;QACL,KAAK,CAAC,UAAU;YACd,OAAO,OAAO,CAAC;QACjB,CAAC;KACe,CAAC;AACrB,CAAC;AAND,8CAMC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,eAA0B;IACvD,OAAO;QACL,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE;YAC1B,OAAO,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC3C,CAAC;KACF,CAAC;AACJ,CAAC;AAND,wCAMC"}
9
lib/trap-caching.js
generated
@@ -100,10 +100,11 @@ async function downloadTrapCaches(codeql, languages, logger) {
// The SHA from the base of the PR is the most similar commit we might have a cache for
const preferredKey = await cacheKey(codeql, language, baseSha);
logger.info(`Looking in Actions cache for TRAP cache with key ${preferredKey}`);
const found = await (0, util_1.withTimeout)(MAX_CACHE_OPERATION_MS, cache.restoreCache([cacheDir], preferredKey, [
await cachePrefix(codeql, language), // Fall back to any cache with the right key prefix
]), () => {
logger.info(`Timed out waiting for TRAP cache download for ${language}, will continue without it`);
const found = await cache.restoreCache([cacheDir], preferredKey, [
// Fall back to any cache with the right key prefix
await cachePrefix(codeql, language),
], {
segmentTimeoutInMs: MAX_CACHE_OPERATION_MS,
});
if (found === undefined) {
// We didn't find a TRAP cache in the Actions cache, so the directory on disk is
File diff suppressed because one or more lines are too long
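The trap-caching hunk above drops the action's own withTimeout wrapper around the cache restore and instead passes the segmentTimeoutInMs download option straight to @actions/cache (bumped to 3.0.5 further down in this diff). A minimal TypeScript sketch of the new call shape; the directory, key, and timeout values are illustrative stand-ins for what cacheKey() and cachePrefix() compute:

import * as cache from "@actions/cache";

async function restoreTrapCache(): Promise<string | undefined> {
  // Illustrative values; the real key and prefix come from cacheKey() and cachePrefix().
  const cacheDir = "/tmp/trap-cache/java";
  const preferredKey = "codeql-trap-java-<base-sha>";
  const MAX_CACHE_OPERATION_MS = 120_000;

  // restoreCache resolves with the matched key, or undefined on a cache miss.
  // segmentTimeoutInMs bounds each downloaded segment rather than racing the
  // whole restore against a hand-rolled timeout.
  return await cache.restoreCache([cacheDir], preferredKey, [], {
    segmentTimeoutInMs: MAX_CACHE_OPERATION_MS,
  });
}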
37
lib/upload-lib.js
generated
@@ -22,9 +22,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
exports.pruneInvalidResults = exports.validateUniqueCategory = exports.waitForProcessing = exports.buildPayload = exports.validateSarifFileSchema = exports.countResultsInSarif = exports.uploadFromRunner = exports.uploadFromActions = exports.findSarifFilesInDir = exports.populateRunAutomationDetails = exports.combineSarifFiles = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const process_1 = require("process");
const zlib_1 = __importDefault(require("zlib"));
const core = __importStar(require("@actions/core"));
const file_url_1 = __importDefault(require("file-url"));
@@ -269,6 +270,8 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
let sarif = combineSarifFiles(sarifFiles);
sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);
sarif = populateRunAutomationDetails(sarif, category, analysisKey, environment);
if (process_1.env["CODEQL_DISABLE_SARIF_PRUNING"] !== "true")
sarif = pruneInvalidResults(sarif, logger);
const toolNames = util.getToolNames(sarif);
validateUniqueCategory(sarif);
const sarifPayload = JSON.stringify(sarif);
@@ -376,4 +379,36 @@ exports.validateUniqueCategory = validateUniqueCategory;
function sanitize(str) {
return (str !== null && str !== void 0 ? str : "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
function pruneInvalidResults(sarif, logger) {
var _a, _b, _c, _d, _e, _f, _g, _h;
let pruned = 0;
const newRuns = [];
for (const run of sarif.runs || []) {
if (((_b = (_a = run.tool) === null || _a === void 0 ? void 0 : _a.driver) === null || _b === void 0 ? void 0 : _b.name) === "CodeQL" &&
((_d = (_c = run.tool) === null || _c === void 0 ? void 0 : _c.driver) === null || _d === void 0 ? void 0 : _d.semanticVersion) === "2.11.2") {
// Version 2.11.2 of the CodeQL CLI had many false positives in the
// rb/weak-cryptographic-algorithm query which we prune here. The
// issue is tracked in https://github.com/github/codeql/issues/11107.
const newResults = [];
for (const result of run.results || []) {
if (result.ruleId === "rb/weak-cryptographic-algorithm" &&
(((_f = (_e = result.message) === null || _e === void 0 ? void 0 : _e.text) === null || _f === void 0 ? void 0 : _f.includes(" MD5 ")) ||
((_h = (_g = result.message) === null || _g === void 0 ? void 0 : _g.text) === null || _h === void 0 ? void 0 : _h.includes(" SHA1 ")))) {
pruned += 1;
continue;
}
newResults.push(result);
}
newRuns.push({ ...run, results: newResults });
}
else {
newRuns.push(run);
}
}
if (pruned > 0) {
logger.info(`Pruned ${pruned} results believed to be invalid from SARIF file.`);
}
return { ...sarif, runs: newRuns };
}
exports.pruneInvalidResults = pruneInvalidResults;
//# sourceMappingURL=upload-lib.js.map
File diff suppressed because one or more lines are too long
100
lib/upload-lib.test.js
generated
@@ -28,6 +28,7 @@ const ava_1 = __importDefault(require("ava"));
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const uploadLib = __importStar(require("./upload-lib"));
const upload_lib_1 = require("./upload-lib");
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
ava_1.default.beforeEach(() => {
@@ -200,6 +201,105 @@ ava_1.default.beforeEach(() => {
t.throws(() => uploadLib.validateUniqueCategory(sarif1));
t.throws(() => uploadLib.validateUniqueCategory(sarif2));
});
(0, ava_1.default)("pruneInvalidResults", (t) => {
const loggedMessages = [];
const mockLogger = {
info: (message) => {
loggedMessages.push(message);
},
};
const sarif = {
runs: [
{
tool: otherTool,
results: [resultWithBadMessage1, resultWithGoodMessage],
},
{
tool: affectedCodeQLVersion,
results: [
resultWithOtherRuleId,
resultWithBadMessage1,
resultWithBadMessage2,
resultWithGoodMessage,
],
},
{
tool: unaffectedCodeQLVersion,
results: [resultWithBadMessage1, resultWithGoodMessage],
},
],
};
const result = (0, upload_lib_1.pruneInvalidResults)(sarif, mockLogger);
const expected = {
runs: [
{
tool: otherTool,
results: [resultWithBadMessage1, resultWithGoodMessage],
},
{
tool: affectedCodeQLVersion,
results: [resultWithOtherRuleId, resultWithGoodMessage],
},
{
tool: unaffectedCodeQLVersion,
results: [resultWithBadMessage1, resultWithGoodMessage],
},
],
};
t.deepEqual(result, expected);
t.deepEqual(loggedMessages.length, 1);
t.assert(loggedMessages[0].includes("Pruned 2 results"));
});
const affectedCodeQLVersion = {
driver: {
name: "CodeQL",
semanticVersion: "2.11.2",
},
};
const unaffectedCodeQLVersion = {
driver: {
name: "CodeQL",
semanticVersion: "2.11.3",
},
};
const otherTool = {
driver: {
name: "Some other tool",
semanticVersion: "2.11.2",
},
};
const resultWithOtherRuleId = {
ruleId: "doNotPrune",
message: {
text: "should not be pruned even though it says MD5 in it",
},
locations: [],
partialFingerprints: {},
};
const resultWithGoodMessage = {
ruleId: "rb/weak-cryptographic-algorithm",
message: {
text: "should not be pruned SHA128 is not a FP",
},
locations: [],
partialFingerprints: {},
};
const resultWithBadMessage1 = {
ruleId: "rb/weak-cryptographic-algorithm",
message: {
text: "should be pruned MD5 is a FP",
},
locations: [],
partialFingerprints: {},
};
const resultWithBadMessage2 = {
ruleId: "rb/weak-cryptographic-algorithm",
message: {
text: "should be pruned SHA1 is a FP",
},
locations: [],
partialFingerprints: {},
};
function createMockSarif(id, tool) {
return {
runs: [
File diff suppressed because one or more lines are too long
80
lib/util.js
generated
@@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.withTimeout = exports.tryGetFolderBytes = exports.isGoExtractionReconciliationEnabled = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isGitHubGhesVersionBelow = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
exports.isHostedRunner = exports.withTimeout = exports.tryGetFolderBytes = exports.isGoExtractionReconciliationEnabled = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.isActions = exports.getMode = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.Mode = exports.assertNever = exports.getGitHubAuth = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
@@ -424,12 +424,6 @@ var EnvVar;
* own sandwiched workflow mechanism
*/
EnvVar["FEATURE_SANDWICH"] = "CODEQL_ACTION_FEATURE_SANDWICH";
/**
* If set to the "true" string and the codeql CLI version is greater than
* `CODEQL_VERSION_CONFIG_FILES`, then the codeql-action will pass the
* the codeql-config file to the codeql CLI to be processed there.
*/
EnvVar["CODEQL_PASS_CONFIG_TO_CLI"] = "CODEQL_PASS_CONFIG_TO_CLI";
})(EnvVar = exports.EnvVar || (exports.EnvVar = {}));
const exportVar = (mode, name, value) => {
if (mode === Mode.actions) {
@@ -491,9 +485,6 @@ function getRequiredEnvParam(paramName) {
return value;
}
exports.getRequiredEnvParam = getRequiredEnvParam;
function getOptionalEnvParam(paramName) {
return process.env[paramName] || "";
}
class HTTPError extends Error {
constructor(message, status) {
super(message);
@@ -515,11 +506,6 @@ function isHTTPError(arg) {
return (arg === null || arg === void 0 ? void 0 : arg.status) !== undefined && Number.isInteger(arg.status);
}
exports.isHTTPError = isHTTPError;
function isGitHubGhesVersionBelow(gitHubVersion, expectedVersion) {
return (gitHubVersion.type === GitHubVariant.GHES &&
semver.lt(gitHubVersion.version, expectedVersion));
}
exports.isGitHubGhesVersionBelow = isGitHubGhesVersionBelow;
let cachedCodeQlVersion = undefined;
function cacheCodeQlVersion(version) {
if (cachedCodeQlVersion !== undefined) {
@@ -662,25 +648,12 @@ exports.isInTestMode = isInTestMode;
* @returns true if the action should generate a conde-scanning config file
* that gets passed to the CLI.
*/
async function useCodeScanningConfigInCli(codeql, featureFlags) {
const envVarIsEnabled = getOptionalEnvParam(EnvVar.CODEQL_PASS_CONFIG_TO_CLI);
// If the user has explicitly turned off the feature, then don't use it.
if (envVarIsEnabled.toLocaleLowerCase() === "false") {
return false;
}
// If the user has explicitly turned on the feature, then use it.
// Or if the feature flag is enabled, then use it.
const isEnabled = envVarIsEnabled.toLocaleLowerCase() === "true" ||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.CliConfigFileEnabled));
if (!isEnabled) {
return false;
}
// If the CLI version is too old, then don't use it.
return await codeQlVersionAbove(codeql, codeql_1.CODEQL_VERSION_CONFIG_FILES);
async function useCodeScanningConfigInCli(codeql, featureEnablement) {
return await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
}
exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
async function logCodeScanningConfigInCli(codeql, featureFlags, logger) {
if (await useCodeScanningConfigInCli(codeql, featureFlags)) {
async function logCodeScanningConfigInCli(codeql, featureEnablement, logger) {
if (await useCodeScanningConfigInCli(codeql, featureEnablement)) {
logger.info("Code Scanning configuration file being processed in the codeql CLI.");
}
else {
@@ -721,9 +694,8 @@ function listFolder(dir) {
return files;
}
exports.listFolder = listFolder;
async function isGoExtractionReconciliationEnabled(featureFlags) {
return (process.env["CODEQL_ACTION_RECONCILE_GO_EXTRACTION"] === "true" ||
(await featureFlags.getValue(feature_flags_1.FeatureFlag.GolangExtractionReconciliationEnabled)));
async function isGoExtractionReconciliationEnabled(featureEnablement) {
return await featureEnablement.getValue(feature_flags_1.Feature.GolangExtractionReconciliationEnabled);
}
exports.isGoExtractionReconciliationEnabled = isGoExtractionReconciliationEnabled;
/**
@@ -748,19 +720,53 @@ exports.tryGetFolderBytes = tryGetFolderBytes;
* Run a promise for a given amount of time, and if it doesn't resolve within
* that time, call the provided callback and then return undefined.
*
* Important: This does NOT cancel the original promise, so that promise will
* continue in the background even after the timeout has expired. If the
* original promise hangs, then this will prevent the process terminating.
*
* @param timeoutMs The timeout in milliseconds.
* @param promise The promise to run.
* @param onTimeout A callback to call if the promise times out.
* @returns The result of the promise, or undefined if the promise times out.
*/
async function withTimeout(timeoutMs, promise, onTimeout) {
let finished = false;
const mainTask = async () => {
const result = await promise;
finished = true;
return result;
};
const timeout = new Promise((resolve) => {
setTimeout(() => {
onTimeout();
if (!finished)
onTimeout();
resolve(undefined);
}, timeoutMs);
});
return await Promise.race([promise, timeout]);
return await Promise.race([mainTask(), timeout]);
}
exports.withTimeout = withTimeout;
/**
* This function implements a heuristic to determine whether the
* runner we are on is hosted by GitHub. It does this by checking
* the name of the runner against the list of known GitHub-hosted
* runner names. It also checks for the presence of a toolcache
* directory with the name hostedtoolcache which is present on
* GitHub-hosted runners.
*
* @returns true iff the runner is hosted by GitHub
*/
function isHostedRunner() {
var _a, _b, _c;
return (
// Name of the runner on hosted Windows runners
((_a = process.env["RUNNER_NAME"]) === null || _a === void 0 ? void 0 : _a.includes("Hosted Agent")) ||
(
// Name of the runner on hosted POSIX runners
(_b = process.env["RUNNER_NAME"]) === null || _b === void 0 ? void 0 : _b.includes("GitHub Actions")) ||
(
// Segment of the path to the tool cache on all hosted runners
(_c = process.env["RUNNER_TOOL_CACHE"]) === null || _c === void 0 ? void 0 : _c.includes("hostedtoolcache")));
}
exports.isHostedRunner = isHostedRunner;
//# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long
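The withTimeout change in the util.js hunk above wraps the original promise in a task that records completion, so the timeout callback only fires when the work has genuinely not finished, and the race runs against that wrapper. A standalone TypeScript sketch of the same pattern, mirroring the compiled code above rather than introducing any new API:

// Race a promise against a timer; call onTimeout only if the work is still pending.
// The original promise is not cancelled and keeps running in the background.
async function withTimeout<T>(
  timeoutMs: number,
  promise: Promise<T>,
  onTimeout: () => void
): Promise<T | undefined> {
  let finished = false;
  const mainTask = async () => {
    const result = await promise;
    finished = true;
    return result;
  };
  const timeout = new Promise<undefined>((resolve) => {
    setTimeout(() => {
      if (!finished) {
        onTimeout();
      }
      resolve(undefined);
    }, timeoutMs);
  });
  return await Promise.race([mainTask(), timeout]);
}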
56
lib/util.test.js
generated
@@ -31,7 +31,6 @@ const github = __importStar(require("@actions/github"));
const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon"));
const api = __importStar(require("./api-client"));
const feature_flags_1 = require("./feature-flags");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
@@ -274,13 +273,6 @@ for (const [packs, expectedStatus] of ML_POWERED_JS_STATUS_TESTS) {
});
});
}
(0, ava_1.default)("isGitHubGhesVersionBelow", async (t) => {
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.DOTCOM }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHAE }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.3.0" }, "3.2.0"));
t.falsy(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.2.0" }, "3.2.0"));
t.true(util.isGitHubGhesVersionBelow({ type: util.GitHubVariant.GHES, version: "3.1.2" }, "3.2.0"));
});
function formatGitHubVersion(version) {
switch (version.type) {
case util.GitHubVariant.DOTCOM:
@@ -362,40 +354,6 @@ for (const [version, githubVersion, shouldReportWarning,] of CHECK_ACTION_VERSIO
]);
});
});
(0, ava_1.default)("useCodeScanningConfigInCli with no env var", async (t) => {
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
// Yay! It works!
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled])));
});
for (const val of ["TRUE", "true", "True"]) {
(0, ava_1.default)(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
// Yay! It works!
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled])));
t.assert(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([])));
});
}
for (const val of ["FALSE", "false", "False"]) {
(0, ava_1.default)(`useCodeScanningConfigInCli with env var ${val}`, async (t) => {
// Never turned on when env var is false
process.env[util.EnvVar.CODEQL_PASS_CONFIG_TO_CLI] = val;
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.0"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([feature_flags_1.FeatureFlag.CliConfigFileEnabled]))));
t.assert(!(await util.useCodeScanningConfigInCli(mockVersion("2.10.1"), (0, feature_flags_1.createFeatureFlags)([]))));
});
}
function mockVersion(version) {
return {
async getVersion() {
return version;
},
};
}
const longTime = 999999;
const shortTime = 10;
(0, ava_1.default)("withTimeout on long task", async (t) => {
@@ -424,4 +382,18 @@ const shortTime = 10;
t.deepEqual(shortTaskTimedOut, false);
t.deepEqual(result, 99);
});
(0, ava_1.default)("withTimeout doesn't call callback if promise resolves", async (t) => {
let shortTaskTimedOut = false;
const shortTask = new Promise((resolve) => {
setTimeout(() => {
resolve(99);
}, shortTime);
});
const result = await util.withTimeout(100, shortTask, () => {
shortTaskTimedOut = true;
});
await new Promise((r) => setTimeout(r, 200));
t.deepEqual(shortTaskTimedOut, false);
t.deepEqual(result, 99);
});
//# sourceMappingURL=util.test.js.map
File diff suppressed because one or more lines are too long
66
node_modules/.package-lock.json
generated
vendored
@@ -1,34 +1,26 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.1.28",
|
||||
"version": "2.1.32",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@actions/artifact": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.0.0.tgz",
|
||||
"integrity": "sha512-oje+cCiM2maVwoiN+LT9kh2C6UqiTcS1tDKins+nRfckX+C8JJD2kAmzpD5fn/p5Dibjrqk1mtwreAzgNxHrDg==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.0.tgz",
|
||||
"integrity": "sha512-shO+w/BAnzRnFhfsgUao8sxjByAMqDdfvek2LLKeCueBKXoTrAcp7U/hs9Fdx+z9g7Q0mbIrmHAzAAww4HK1bQ==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/http-client": "^1.0.11",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"tmp": "^0.2.1",
|
||||
"tmp-promise": "^3.0.2"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/artifact/node_modules/@actions/http-client": {
|
||||
"version": "1.0.11",
|
||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz",
|
||||
"integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==",
|
||||
"dependencies": {
|
||||
"tunnel": "0.0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/cache": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
|
||||
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
|
||||
"integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
@@ -57,9 +49,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/core": {
|
||||
"version": "1.9.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz",
|
||||
"integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==",
|
||||
"version": "1.10.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
|
||||
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
|
||||
"dependencies": {
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"uuid": "^8.3.2"
|
||||
@@ -74,9 +66,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/exec": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.0.tgz",
|
||||
"integrity": "sha512-LImpN9AY0J1R1mEYJjVJfSZWU4zYOlEcwSTgPve1rFQqK5AwrEs6uWW5Rv70gbDIQIAUwI86z6B+9mPK4w9Sbg==",
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
|
||||
"integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
|
||||
"dependencies": {
|
||||
"@actions/io": "^1.0.1"
|
||||
}
|
||||
@@ -117,18 +109,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/io": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.1.tgz",
|
||||
"integrity": "sha512-Qi4JoKXjmE0O67wAOH6y0n26QXhMKMFo7GD/4IXNVcrtLjUlGjGuVys6pQgwF3ArfGTQu0XpqaNr0YhED2RaRA=="
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.2.tgz",
|
||||
"integrity": "sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw=="
|
||||
},
|
||||
"node_modules/@actions/tool-cache": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.0.tgz",
|
||||
"integrity": "sha512-/5/R16ddC2hw48UO5T/d1gwzsyZjBTttGLV74xnLgfD/7nJKk17ArgCi3YXj3et/IAatMDEqaTM5kw5Hevgd5A==",
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-2.0.1.tgz",
|
||||
"integrity": "sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/exec": "^1.0.0",
|
||||
"@actions/http-client": "^2.0.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"@actions/io": "^1.1.1",
|
||||
"semver": "^6.1.0",
|
||||
"uuid": "^3.3.2"
|
||||
@@ -603,9 +595,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/openapi-types": {
|
||||
"version": "13.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-13.4.0.tgz",
|
||||
"integrity": "sha512-2mVzW0X1+HDO3jF80/+QFZNzJiTefELKbhMu6yaBYbp/1gSMkVDm4rT472gJljTokWUlXaaE63m7WrWENhMDLw=="
|
||||
"version": "14.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-14.0.0.tgz",
|
||||
"integrity": "sha512-HNWisMYlR8VCnNurDU6os2ikx0s0VyEjDYHNS/h4cgb8DeOxQ0n72HyinUtdDVxJhFy3FWLGl0DJhfEWk3P5Iw=="
|
||||
},
|
||||
"node_modules/@octokit/plugin-paginate-rest": {
|
||||
"version": "2.4.0",
|
||||
@@ -703,11 +695,11 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/types": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-7.1.1.tgz",
|
||||
"integrity": "sha512-Dx6cNTORyVaKY0Yeb9MbHksk79L8GXsihbG6PtWqTpkyA2TY1qBWE26EQXVG3dHwY9Femdd/WEeRUEiD0+H3TQ==",
|
||||
"version": "8.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-8.0.0.tgz",
|
||||
"integrity": "sha512-65/TPpOJP1i3K4lBJMnWqPUJ6zuOtzhtagDvydAWbEXpbFYA0oMKKyLb95NFZZP0lSh/4b6K+DQlzvYQJQQePg==",
|
||||
"dependencies": {
|
||||
"@octokit/openapi-types": "^13.4.0"
|
||||
"@octokit/openapi-types": "^14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@opentelemetry/api": {
|
||||
|
||||
21
node_modules/@actions/artifact/lib/internal/crc64.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
/**
* CRC64: cyclic redundancy check, 64-bits
*
* In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
* validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
* but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
*
* This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
* is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
*/
/// <reference types="node" />
export declare type CRC64DigestEncoding = 'hex' | 'base64' | 'buffer';
declare class CRC64 {
private _crc;
constructor();
update(data: Buffer | string): void;
digest(encoding?: CRC64DigestEncoding): string | Buffer;
private toBuffer;
static flip64Bits(n: bigint): bigint;
}
export default CRC64;
303
node_modules/@actions/artifact/lib/internal/crc64.js
generated
vendored
Normal file
@@ -0,0 +1,303 @@
|
||||
"use strict";
|
||||
/**
|
||||
* CRC64: cyclic redundancy check, 64-bits
|
||||
*
|
||||
* In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
|
||||
* validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
|
||||
* but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
|
||||
*
|
||||
* This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
|
||||
* is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737)
|
||||
const PREGEN_POLY_TABLE = [
|
||||
BigInt('0x0000000000000000'),
|
||||
BigInt('0x7F6EF0C830358979'),
|
||||
BigInt('0xFEDDE190606B12F2'),
|
||||
BigInt('0x81B31158505E9B8B'),
|
||||
BigInt('0xC962E5739841B68F'),
|
||||
BigInt('0xB60C15BBA8743FF6'),
|
||||
BigInt('0x37BF04E3F82AA47D'),
|
||||
BigInt('0x48D1F42BC81F2D04'),
|
||||
BigInt('0xA61CECB46814FE75'),
|
||||
BigInt('0xD9721C7C5821770C'),
|
||||
BigInt('0x58C10D24087FEC87'),
|
||||
BigInt('0x27AFFDEC384A65FE'),
|
||||
BigInt('0x6F7E09C7F05548FA'),
|
||||
BigInt('0x1010F90FC060C183'),
|
||||
BigInt('0x91A3E857903E5A08'),
|
||||
BigInt('0xEECD189FA00BD371'),
|
||||
BigInt('0x78E0FF3B88BE6F81'),
|
||||
BigInt('0x078E0FF3B88BE6F8'),
|
||||
BigInt('0x863D1EABE8D57D73'),
|
||||
BigInt('0xF953EE63D8E0F40A'),
|
||||
BigInt('0xB1821A4810FFD90E'),
|
||||
BigInt('0xCEECEA8020CA5077'),
|
||||
BigInt('0x4F5FFBD87094CBFC'),
|
||||
BigInt('0x30310B1040A14285'),
|
||||
BigInt('0xDEFC138FE0AA91F4'),
|
||||
BigInt('0xA192E347D09F188D'),
|
||||
BigInt('0x2021F21F80C18306'),
|
||||
BigInt('0x5F4F02D7B0F40A7F'),
|
||||
BigInt('0x179EF6FC78EB277B'),
|
||||
BigInt('0x68F0063448DEAE02'),
|
||||
BigInt('0xE943176C18803589'),
|
||||
BigInt('0x962DE7A428B5BCF0'),
|
||||
BigInt('0xF1C1FE77117CDF02'),
|
||||
BigInt('0x8EAF0EBF2149567B'),
|
||||
BigInt('0x0F1C1FE77117CDF0'),
|
||||
BigInt('0x7072EF2F41224489'),
|
||||
BigInt('0x38A31B04893D698D'),
|
||||
BigInt('0x47CDEBCCB908E0F4'),
|
||||
BigInt('0xC67EFA94E9567B7F'),
|
||||
BigInt('0xB9100A5CD963F206'),
|
||||
BigInt('0x57DD12C379682177'),
|
||||
BigInt('0x28B3E20B495DA80E'),
|
||||
BigInt('0xA900F35319033385'),
|
||||
BigInt('0xD66E039B2936BAFC'),
|
||||
BigInt('0x9EBFF7B0E12997F8'),
|
||||
BigInt('0xE1D10778D11C1E81'),
|
||||
BigInt('0x606216208142850A'),
|
||||
BigInt('0x1F0CE6E8B1770C73'),
|
||||
BigInt('0x8921014C99C2B083'),
|
||||
BigInt('0xF64FF184A9F739FA'),
|
||||
BigInt('0x77FCE0DCF9A9A271'),
|
||||
BigInt('0x08921014C99C2B08'),
|
||||
BigInt('0x4043E43F0183060C'),
|
||||
BigInt('0x3F2D14F731B68F75'),
|
||||
BigInt('0xBE9E05AF61E814FE'),
|
||||
BigInt('0xC1F0F56751DD9D87'),
|
||||
BigInt('0x2F3DEDF8F1D64EF6'),
|
||||
BigInt('0x50531D30C1E3C78F'),
|
||||
BigInt('0xD1E00C6891BD5C04'),
|
||||
BigInt('0xAE8EFCA0A188D57D'),
|
||||
BigInt('0xE65F088B6997F879'),
|
||||
BigInt('0x9931F84359A27100'),
|
||||
BigInt('0x1882E91B09FCEA8B'),
|
||||
BigInt('0x67EC19D339C963F2'),
|
||||
BigInt('0xD75ADABD7A6E2D6F'),
|
||||
BigInt('0xA8342A754A5BA416'),
|
||||
BigInt('0x29873B2D1A053F9D'),
|
||||
BigInt('0x56E9CBE52A30B6E4'),
|
||||
BigInt('0x1E383FCEE22F9BE0'),
|
||||
BigInt('0x6156CF06D21A1299'),
|
||||
BigInt('0xE0E5DE5E82448912'),
|
||||
BigInt('0x9F8B2E96B271006B'),
|
||||
BigInt('0x71463609127AD31A'),
|
||||
BigInt('0x0E28C6C1224F5A63'),
|
||||
BigInt('0x8F9BD7997211C1E8'),
|
||||
BigInt('0xF0F5275142244891'),
|
||||
BigInt('0xB824D37A8A3B6595'),
|
||||
BigInt('0xC74A23B2BA0EECEC'),
|
||||
BigInt('0x46F932EAEA507767'),
|
||||
BigInt('0x3997C222DA65FE1E'),
|
||||
BigInt('0xAFBA2586F2D042EE'),
|
||||
BigInt('0xD0D4D54EC2E5CB97'),
|
||||
BigInt('0x5167C41692BB501C'),
|
||||
BigInt('0x2E0934DEA28ED965'),
|
||||
BigInt('0x66D8C0F56A91F461'),
|
||||
BigInt('0x19B6303D5AA47D18'),
|
||||
BigInt('0x980521650AFAE693'),
|
||||
BigInt('0xE76BD1AD3ACF6FEA'),
|
||||
BigInt('0x09A6C9329AC4BC9B'),
|
||||
BigInt('0x76C839FAAAF135E2'),
|
||||
BigInt('0xF77B28A2FAAFAE69'),
|
||||
BigInt('0x8815D86ACA9A2710'),
|
||||
BigInt('0xC0C42C4102850A14'),
|
||||
BigInt('0xBFAADC8932B0836D'),
|
||||
BigInt('0x3E19CDD162EE18E6'),
|
||||
BigInt('0x41773D1952DB919F'),
|
||||
BigInt('0x269B24CA6B12F26D'),
|
||||
BigInt('0x59F5D4025B277B14'),
|
||||
BigInt('0xD846C55A0B79E09F'),
|
||||
BigInt('0xA72835923B4C69E6'),
|
||||
BigInt('0xEFF9C1B9F35344E2'),
|
||||
BigInt('0x90973171C366CD9B'),
|
||||
BigInt('0x1124202993385610'),
|
||||
BigInt('0x6E4AD0E1A30DDF69'),
|
||||
BigInt('0x8087C87E03060C18'),
|
||||
BigInt('0xFFE938B633338561'),
|
||||
BigInt('0x7E5A29EE636D1EEA'),
|
||||
BigInt('0x0134D92653589793'),
|
||||
BigInt('0x49E52D0D9B47BA97'),
|
||||
BigInt('0x368BDDC5AB7233EE'),
|
||||
BigInt('0xB738CC9DFB2CA865'),
|
||||
BigInt('0xC8563C55CB19211C'),
|
||||
BigInt('0x5E7BDBF1E3AC9DEC'),
|
||||
BigInt('0x21152B39D3991495'),
|
||||
BigInt('0xA0A63A6183C78F1E'),
|
||||
BigInt('0xDFC8CAA9B3F20667'),
|
||||
BigInt('0x97193E827BED2B63'),
|
||||
BigInt('0xE877CE4A4BD8A21A'),
|
||||
BigInt('0x69C4DF121B863991'),
|
||||
BigInt('0x16AA2FDA2BB3B0E8'),
|
||||
BigInt('0xF86737458BB86399'),
|
||||
BigInt('0x8709C78DBB8DEAE0'),
|
||||
BigInt('0x06BAD6D5EBD3716B'),
|
||||
BigInt('0x79D4261DDBE6F812'),
|
||||
BigInt('0x3105D23613F9D516'),
|
||||
BigInt('0x4E6B22FE23CC5C6F'),
|
||||
BigInt('0xCFD833A67392C7E4'),
|
||||
BigInt('0xB0B6C36E43A74E9D'),
|
||||
BigInt('0x9A6C9329AC4BC9B5'),
|
||||
BigInt('0xE50263E19C7E40CC'),
|
||||
BigInt('0x64B172B9CC20DB47'),
|
||||
BigInt('0x1BDF8271FC15523E'),
|
||||
BigInt('0x530E765A340A7F3A'),
|
||||
BigInt('0x2C608692043FF643'),
|
||||
BigInt('0xADD397CA54616DC8'),
|
||||
BigInt('0xD2BD67026454E4B1'),
|
||||
BigInt('0x3C707F9DC45F37C0'),
|
||||
BigInt('0x431E8F55F46ABEB9'),
|
||||
BigInt('0xC2AD9E0DA4342532'),
|
||||
BigInt('0xBDC36EC59401AC4B'),
|
||||
BigInt('0xF5129AEE5C1E814F'),
|
||||
BigInt('0x8A7C6A266C2B0836'),
|
||||
BigInt('0x0BCF7B7E3C7593BD'),
|
||||
BigInt('0x74A18BB60C401AC4'),
|
||||
BigInt('0xE28C6C1224F5A634'),
|
||||
BigInt('0x9DE29CDA14C02F4D'),
|
||||
BigInt('0x1C518D82449EB4C6'),
|
||||
BigInt('0x633F7D4A74AB3DBF'),
|
||||
BigInt('0x2BEE8961BCB410BB'),
|
||||
BigInt('0x548079A98C8199C2'),
|
||||
BigInt('0xD53368F1DCDF0249'),
|
||||
BigInt('0xAA5D9839ECEA8B30'),
|
||||
BigInt('0x449080A64CE15841'),
|
||||
BigInt('0x3BFE706E7CD4D138'),
|
||||
BigInt('0xBA4D61362C8A4AB3'),
|
||||
BigInt('0xC52391FE1CBFC3CA'),
|
||||
BigInt('0x8DF265D5D4A0EECE'),
|
||||
BigInt('0xF29C951DE49567B7'),
|
||||
BigInt('0x732F8445B4CBFC3C'),
|
||||
BigInt('0x0C41748D84FE7545'),
|
||||
BigInt('0x6BAD6D5EBD3716B7'),
|
||||
BigInt('0x14C39D968D029FCE'),
|
||||
BigInt('0x95708CCEDD5C0445'),
|
||||
BigInt('0xEA1E7C06ED698D3C'),
|
||||
BigInt('0xA2CF882D2576A038'),
|
||||
BigInt('0xDDA178E515432941'),
|
||||
BigInt('0x5C1269BD451DB2CA'),
|
||||
BigInt('0x237C997575283BB3'),
|
||||
BigInt('0xCDB181EAD523E8C2'),
|
||||
BigInt('0xB2DF7122E51661BB'),
|
||||
BigInt('0x336C607AB548FA30'),
|
||||
BigInt('0x4C0290B2857D7349'),
|
||||
BigInt('0x04D364994D625E4D'),
|
||||
BigInt('0x7BBD94517D57D734'),
|
||||
BigInt('0xFA0E85092D094CBF'),
|
||||
BigInt('0x856075C11D3CC5C6'),
|
||||
BigInt('0x134D926535897936'),
|
||||
BigInt('0x6C2362AD05BCF04F'),
|
||||
BigInt('0xED9073F555E26BC4'),
|
||||
BigInt('0x92FE833D65D7E2BD'),
|
||||
BigInt('0xDA2F7716ADC8CFB9'),
|
||||
BigInt('0xA54187DE9DFD46C0'),
|
||||
BigInt('0x24F29686CDA3DD4B'),
|
||||
BigInt('0x5B9C664EFD965432'),
|
||||
BigInt('0xB5517ED15D9D8743'),
|
||||
BigInt('0xCA3F8E196DA80E3A'),
|
||||
BigInt('0x4B8C9F413DF695B1'),
|
||||
BigInt('0x34E26F890DC31CC8'),
|
||||
BigInt('0x7C339BA2C5DC31CC'),
|
||||
BigInt('0x035D6B6AF5E9B8B5'),
|
||||
BigInt('0x82EE7A32A5B7233E'),
|
||||
BigInt('0xFD808AFA9582AA47'),
|
||||
BigInt('0x4D364994D625E4DA'),
|
||||
BigInt('0x3258B95CE6106DA3'),
|
||||
BigInt('0xB3EBA804B64EF628'),
|
||||
BigInt('0xCC8558CC867B7F51'),
|
||||
BigInt('0x8454ACE74E645255'),
|
||||
BigInt('0xFB3A5C2F7E51DB2C'),
|
||||
BigInt('0x7A894D772E0F40A7'),
|
||||
BigInt('0x05E7BDBF1E3AC9DE'),
|
||||
BigInt('0xEB2AA520BE311AAF'),
|
||||
BigInt('0x944455E88E0493D6'),
|
||||
BigInt('0x15F744B0DE5A085D'),
|
||||
BigInt('0x6A99B478EE6F8124'),
|
||||
BigInt('0x224840532670AC20'),
|
||||
BigInt('0x5D26B09B16452559'),
|
||||
BigInt('0xDC95A1C3461BBED2'),
|
||||
BigInt('0xA3FB510B762E37AB'),
|
||||
BigInt('0x35D6B6AF5E9B8B5B'),
|
||||
BigInt('0x4AB846676EAE0222'),
|
||||
BigInt('0xCB0B573F3EF099A9'),
|
||||
BigInt('0xB465A7F70EC510D0'),
|
||||
BigInt('0xFCB453DCC6DA3DD4'),
|
||||
BigInt('0x83DAA314F6EFB4AD'),
|
||||
BigInt('0x0269B24CA6B12F26'),
|
||||
BigInt('0x7D0742849684A65F'),
|
||||
BigInt('0x93CA5A1B368F752E'),
|
||||
BigInt('0xECA4AAD306BAFC57'),
|
||||
BigInt('0x6D17BB8B56E467DC'),
|
||||
BigInt('0x12794B4366D1EEA5'),
|
||||
BigInt('0x5AA8BF68AECEC3A1'),
|
||||
BigInt('0x25C64FA09EFB4AD8'),
|
||||
BigInt('0xA4755EF8CEA5D153'),
|
||||
BigInt('0xDB1BAE30FE90582A'),
|
||||
BigInt('0xBCF7B7E3C7593BD8'),
|
||||
BigInt('0xC399472BF76CB2A1'),
|
||||
BigInt('0x422A5673A732292A'),
|
||||
BigInt('0x3D44A6BB9707A053'),
|
||||
BigInt('0x759552905F188D57'),
|
||||
BigInt('0x0AFBA2586F2D042E'),
|
||||
BigInt('0x8B48B3003F739FA5'),
|
||||
BigInt('0xF42643C80F4616DC'),
|
||||
BigInt('0x1AEB5B57AF4DC5AD'),
|
||||
BigInt('0x6585AB9F9F784CD4'),
|
||||
BigInt('0xE436BAC7CF26D75F'),
|
||||
BigInt('0x9B584A0FFF135E26'),
|
||||
BigInt('0xD389BE24370C7322'),
|
||||
BigInt('0xACE74EEC0739FA5B'),
|
||||
BigInt('0x2D545FB4576761D0'),
|
||||
BigInt('0x523AAF7C6752E8A9'),
|
||||
BigInt('0xC41748D84FE75459'),
|
||||
BigInt('0xBB79B8107FD2DD20'),
|
||||
BigInt('0x3ACAA9482F8C46AB'),
|
||||
BigInt('0x45A459801FB9CFD2'),
|
||||
BigInt('0x0D75ADABD7A6E2D6'),
|
||||
BigInt('0x721B5D63E7936BAF'),
|
||||
BigInt('0xF3A84C3BB7CDF024'),
|
||||
BigInt('0x8CC6BCF387F8795D'),
|
||||
BigInt('0x620BA46C27F3AA2C'),
|
||||
BigInt('0x1D6554A417C62355'),
|
||||
BigInt('0x9CD645FC4798B8DE'),
|
||||
BigInt('0xE3B8B53477AD31A7'),
|
||||
BigInt('0xAB69411FBFB21CA3'),
|
||||
BigInt('0xD407B1D78F8795DA'),
|
||||
BigInt('0x55B4A08FDFD90E51'),
|
||||
BigInt('0x2ADA5047EFEC8728')
|
||||
];
|
||||
class CRC64 {
|
||||
constructor() {
|
||||
this._crc = BigInt(0);
|
||||
}
|
||||
update(data) {
|
||||
const buffer = typeof data === 'string' ? Buffer.from(data) : data;
|
||||
let crc = CRC64.flip64Bits(this._crc);
|
||||
for (const dataByte of buffer) {
|
||||
const crcByte = Number(crc & BigInt(0xff));
|
||||
crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8));
|
||||
}
|
||||
this._crc = CRC64.flip64Bits(crc);
|
||||
}
|
||||
digest(encoding) {
|
||||
switch (encoding) {
|
||||
case 'hex':
|
||||
return this._crc.toString(16).toUpperCase();
|
||||
case 'base64':
|
||||
return this.toBuffer().toString('base64');
|
||||
default:
|
||||
return this.toBuffer();
|
||||
}
|
||||
}
|
||||
toBuffer() {
|
||||
return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff))));
|
||||
}
|
||||
static flip64Bits(n) {
|
||||
return (BigInt(1) << BigInt(64)) - BigInt(1) - n;
|
||||
}
|
||||
}
|
||||
exports.default = CRC64;
|
||||
//# sourceMappingURL=crc64.js.map
|
||||
1
node_modules/@actions/artifact/lib/internal/crc64.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
4
node_modules/@actions/artifact/lib/internal/download-http-client.d.ts
generated
vendored
@@ -1,7 +1,7 @@
/// <reference types="node" />
import * as fs from 'fs';
import { ListArtifactsResponse, QueryArtifactResponse } from './contracts';
import { IHttpClientResponse } from '@actions/http-client/interfaces';
import { HttpClientResponse } from '@actions/http-client';
import { DownloadItem } from './download-specification';
export declare class DownloadHttpClient {
private downloadHttpManager;
@@ -35,5 +35,5 @@ export declare class DownloadHttpClient {
* @param destinationStream the stream where the file should be written to
* @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
*/
pipeResponseToFile(response: IHttpClientResponse, destinationStream: fs.WriteStream, isGzip: boolean): Promise<void>;
pipeResponseToFile(response: HttpClientResponse, destinationStream: fs.WriteStream, isGzip: boolean): Promise<void>;
}
2
node_modules/@actions/artifact/lib/internal/download-http-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
2
node_modules/@actions/artifact/lib/internal/http-manager.d.ts
generated
vendored
@@ -1,4 +1,4 @@
import { HttpClient } from '@actions/http-client/index';
import { HttpClient } from '@actions/http-client';
/**
* Used for managing http clients during either upload or download
*/
6
node_modules/@actions/artifact/lib/internal/requestUtils.d.ts
generated
vendored
@@ -1,3 +1,3 @@
import { IHttpClientResponse } from '@actions/http-client/interfaces';
export declare function retry(name: string, operation: () => Promise<IHttpClientResponse>, customErrorMessages: Map<number, string>, maxAttempts: number): Promise<IHttpClientResponse>;
export declare function retryHttpClientRequest(name: string, method: () => Promise<IHttpClientResponse>, customErrorMessages?: Map<number, string>, maxAttempts?: number): Promise<IHttpClientResponse>;
import { HttpClientResponse } from '@actions/http-client';
export declare function retry(name: string, operation: () => Promise<HttpClientResponse>, customErrorMessages: Map<number, string>, maxAttempts: number): Promise<HttpClientResponse>;
export declare function retryHttpClientRequest(name: string, method: () => Promise<HttpClientResponse>, customErrorMessages?: Map<number, string>, maxAttempts?: number): Promise<HttpClientResponse>;
2
node_modules/@actions/artifact/lib/internal/requestUtils.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA6C,EAC7C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAoC,SAAS,CAAA;QACzD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,2BAAmB,CAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,6BAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,aAAK,CAAC,6CAAqC,CAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAA0C,EAC1C,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,gCAAa,EAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"}
{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,mCAMgB;AAChB,oDAAqC;AACrC,yDAAgD;AAEhD,SAAsB,KAAK,CACzB,IAAY,EACZ,SAA4C,EAC5C,mBAAwC,EACxC,WAAmB;;QAEnB,IAAI,QAAQ,GAAmC,SAAS,CAAA;QACxD,IAAI,UAAU,GAAuB,SAAS,CAAA;QAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;QACvB,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,sBAAsB,GAAuB,SAAS,CAAA;QAC1D,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI;gBACF,QAAQ,GAAG,MAAM,SAAS,EAAE,CAAA;gBAC5B,UAAU,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAA;gBAExC,IAAI,2BAAmB,CAAC,UAAU,CAAC,EAAE;oBACnC,OAAO,QAAQ,CAAA;iBAChB;gBAED,uFAAuF;gBACvF,IAAI,UAAU,EAAE;oBACd,sBAAsB,GAAG,mBAAmB,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;iBAC7D;gBAED,WAAW,GAAG,6BAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,mCAAmC,UAAU,EAAE,CAAA;aAC/D;YAAC,OAAO,KAAK,EAAE;gBACd,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,IAAI,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC7C,IAAI,QAAQ,EAAE;oBACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;iBACjC;gBACD,MAAK;aACN;YAED,IAAI,CAAC,IAAI,CACP,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,MAAM,aAAK,CAAC,6CAAqC,CAAC,OAAO,CAAC,CAAC,CAAA;YAC3D,OAAO,EAAE,CAAA;SACV;QAED,IAAI,QAAQ,EAAE;YACZ,8BAAsB,CAAC,QAAQ,CAAC,CAAA;SACjC;QAED,IAAI,sBAAsB,EAAE;YAC1B,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,sBAAsB,EAAE,CAAC,CAAA;SACzD;QACD,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AA1DD,sBA0DC;AAED,SAAsB,sBAAsB,CAC1C,IAAY,EACZ,MAAyC,EACzC,sBAA2C,IAAI,GAAG,EAAE,EACpD,WAAW,GAAG,gCAAa,EAAE;;QAE7B,OAAO,MAAM,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,mBAAmB,EAAE,WAAW,CAAC,CAAA;IACpE,CAAC;CAAA;AAPD,wDAOC"}
4
node_modules/@actions/artifact/lib/internal/upload-http-client.js
generated
vendored
@@ -299,8 +299,10 @@ class UploadHttpClient {
*/
uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
return __awaiter(this, void 0, void 0, function* () {
// open a new stream and read it to compute the digest
const digest = yield utils_1.digestForStream(openStream());
// prepare all the necessary headers before making any http call
const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest);
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
const client = this.uploadHttpManager.getClient(httpClientIndex);
return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
2
node_modules/@actions/artifact/lib/internal/upload-http-client.js.map
generated
vendored
File diff suppressed because one or more lines are too long
16
node_modules/@actions/artifact/lib/internal/utils.d.ts
generated
vendored
@@ -1,7 +1,6 @@
/// <reference types="node" />
import { HttpClient } from '@actions/http-client';
import { IHeaders, IHttpClientResponse } from '@actions/http-client/interfaces';
import { IncomingHttpHeaders } from 'http';
import { IncomingHttpHeaders, OutgoingHttpHeaders } from 'http';
import { HttpClient, HttpClientResponse } from '@actions/http-client';
/**
* Returns a retry time in milliseconds that exponentially gets larger
* depending on the amount of retries that have been attempted
@@ -34,7 +33,7 @@ export declare function getContentRange(start: number, end: number, total: numbe
* @param {string} acceptType the type of content that we can accept
* @returns appropriate headers to make a specific http call during artifact download
*/
export declare function getDownloadHeaders(contentType: string, isKeepAlive?: boolean, acceptGzip?: boolean): IHeaders;
export declare function getDownloadHeaders(contentType: string, isKeepAlive?: boolean, acceptGzip?: boolean): OutgoingHttpHeaders;
/**
* Sets all the necessary headers when uploading an artifact
* @param {string} contentType the type of content being uploaded
@@ -45,7 +44,7 @@ export declare function getDownloadHeaders(contentType: string, isKeepAlive?: bo
* @param {string} contentRange the range of the content that is being uploaded
* @returns appropriate headers to make a specific http call during artifact upload
*/
export declare function getUploadHeaders(contentType: string, isKeepAlive?: boolean, isGzip?: boolean, uncompressedLength?: number, contentLength?: number, contentRange?: string): IHeaders;
export declare function getUploadHeaders(contentType: string, isKeepAlive?: boolean, isGzip?: boolean, uncompressedLength?: number, contentLength?: number, contentRange?: string, digest?: StreamDigest): OutgoingHttpHeaders;
export declare function createHttpClient(userAgent: string): HttpClient;
export declare function getArtifactUrl(): string;
/**
@@ -57,10 +56,15 @@ export declare function getArtifactUrl(): string;
* Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided.
* Other information such as the headers, the response code and message might be useful, so this is displayed.
*/
export declare function displayHttpDiagnostics(response: IHttpClientResponse): void;
export declare function displayHttpDiagnostics(response: HttpClientResponse): void;
export declare function createDirectoriesForArtifact(directories: string[]): Promise<void>;
export declare function createEmptyFilesForArtifact(emptyFilesToCreate: string[]): Promise<void>;
export declare function getFileSize(filePath: string): Promise<number>;
export declare function rmFile(filePath: string): Promise<void>;
export declare function getProperRetention(retentionInput: number, retentionSetting: string | undefined): number;
export declare function sleep(milliseconds: number): Promise<void>;
export interface StreamDigest {
crc64: string;
md5: string;
}
export declare function digestForStream(stream: NodeJS.ReadableStream): Promise<StreamDigest>;
36
node_modules/@actions/artifact/lib/internal/utils.js
generated
vendored
@@ -8,13 +8,18 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0;
const core_1 = require("@actions/core");
exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0;
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const fs_1 = require("fs");
|
||||
const core_1 = require("@actions/core");
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const auth_1 = require("@actions/http-client/auth");
|
||||
const auth_1 = require("@actions/http-client/lib/auth");
|
||||
const config_variables_1 = require("./config-variables");
|
||||
const crc64_1 = __importDefault(require("./crc64"));
|
||||
/**
|
||||
* Returns a retry time in milliseconds that exponentially gets larger
|
||||
* depending on the amount of retries that have been attempted
|
||||
@@ -155,7 +160,7 @@ exports.getDownloadHeaders = getDownloadHeaders;
|
||||
* @param {string} contentRange the range of the content that is being uploaded
|
||||
* @returns appropriate headers to make a specific http call during artifact upload
|
||||
*/
|
||||
function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
|
||||
function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) {
|
||||
const requestOptions = {};
|
||||
requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
|
||||
if (contentType) {
|
||||
@@ -176,6 +181,10 @@ function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength,
|
||||
if (contentRange) {
|
||||
requestOptions['Content-Range'] = contentRange;
|
||||
}
|
||||
if (digest) {
|
||||
requestOptions['x-actions-results-crc64'] = digest.crc64;
|
||||
requestOptions['x-actions-results-md5'] = digest.md5;
|
||||
}
|
||||
return requestOptions;
|
||||
}
|
||||
exports.getUploadHeaders = getUploadHeaders;
|
||||
@@ -261,4 +270,23 @@ function sleep(milliseconds) {
|
||||
});
|
||||
}
|
||||
exports.sleep = sleep;
|
||||
function digestForStream(stream) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise((resolve, reject) => {
|
||||
const crc64 = new crc64_1.default();
|
||||
const md5 = crypto_1.default.createHash('md5');
|
||||
stream
|
||||
.on('data', data => {
|
||||
crc64.update(data);
|
||||
md5.update(data);
|
||||
})
|
||||
.on('end', () => resolve({
|
||||
crc64: crc64.digest('base64'),
|
||||
md5: md5.digest('base64')
|
||||
}))
|
||||
.on('error', reject);
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.digestForStream = digestForStream;
|
||||
//# sourceMappingURL=utils.js.map
|
||||
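Note: the substance of this hunk is that artifact uploads now carry content digests. `digestForStream()` pipes the payload through the vendored CRC64 class (`./crc64`) and Node's MD5 hash, and `getUploadHeaders()` copies the base64 results into the `x-actions-results-crc64` and `x-actions-results-md5` headers. A minimal TypeScript sketch of the same pattern, using only `node:crypto` (the CRC64 half is omitted and the file path is hypothetical):

```typescript
// Sketch only (not the action's code): reproduce the MD5 half of digestForStream()
// with node:crypto. The vendored ./crc64 class used above exposes the same
// update()/digest('base64') shape, so the crc64 value is produced the same way.
import { createHash } from 'crypto';
import { createReadStream } from 'fs';

function md5ForStream(stream: NodeJS.ReadableStream): Promise<string> {
  return new Promise((resolve, reject) => {
    const md5 = createHash('md5');
    stream
      .on('data', chunk => md5.update(chunk))
      .on('end', () => resolve(md5.digest('base64'))) // base64, as sent in x-actions-results-md5
      .on('error', reject);
  });
}

// Hypothetical usage: the resulting string is what getUploadHeaders() would
// place in the 'x-actions-results-md5' request header.
md5ForStream(createReadStream('artifact.bin')).then(md5 => console.log(md5));
```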
2  node_modules/@actions/artifact/lib/internal/utils.js.map  (generated) (vendored)
File diff suppressed because one or more lines are too long
21  node_modules/@actions/artifact/node_modules/@actions/http-client/LICENSE  (generated) (vendored)
@@ -1,21 +0,0 @@
Actions Http Client for Node.js

Copyright (c) GitHub, Inc.

All rights reserved.

MIT License

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
79  node_modules/@actions/artifact/node_modules/@actions/http-client/README.md  (generated) (vendored)
@@ -1,79 +0,0 @@

<p align="center">
  <img src="actions.png">
</p>

# Actions Http-Client

[](https://github.com/actions/http-client/actions)

A lightweight HTTP client optimized for use with actions, TypeScript with generics and async await.

## Features

- HTTP client with TypeScript generics and async/await/Promises
- Typings included so no need to acquire separately (great for intellisense and no versioning drift)
- [Proxy support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners#using-a-proxy-server-with-self-hosted-runners) just works with actions and the runner
- Targets ES2019 (runner runs actions with node 12+). Only supported on node 12+.
- Basic, Bearer and PAT Support out of the box. Extensible handlers for others.
- Redirects supported

Features and releases [here](./RELEASES.md)

## Install

```
npm install @actions/http-client --save
```

## Samples

See the [HTTP](./__tests__) tests for detailed examples.

## Errors

### HTTP

The HTTP client does not throw unless truly exceptional.

* A request that successfully executes resulting in a 404, 500 etc... will return a response object with a status code and a body.
* Redirects (3xx) will be followed by default.

See [HTTP tests](./__tests__) for detailed examples.

## Debugging

To enable detailed console logging of all HTTP requests and responses, set the NODE_DEBUG environment varible:

```
export NODE_DEBUG=http
```

## Node support

The http-client is built using the latest LTS version of Node 12. It may work on previous node LTS versions but it's tested and officially supported on Node12+.

## Support and Versioning

We follow semver and will hold compatibility between major versions and increment the minor version with new features and capabilities (while holding compat).

## Contributing

We welcome PRs. Please create an issue and if applicable, a design before proceeding with code.

once:

```bash
$ npm install
```

To build:

```bash
$ npm run build
```

To run all tests:
```bash
$ npm test
```
26  node_modules/@actions/artifact/node_modules/@actions/http-client/RELEASES.md  (generated) (vendored)
@@ -1,26 +0,0 @@
## Releases

## 1.0.10

Contains a bug fix where proxy is defined without a user and password. see [PR here](https://github.com/actions/http-client/pull/42)

## 1.0.9
Throw HttpClientError instead of a generic Error from the \<verb>Json() helper methods when the server responds with a non-successful status code.

## 1.0.8
Fixed security issue where a redirect (e.g. 302) to another domain would pass headers. The fix was to strip the authorization header if the hostname was different. More [details in PR #27](https://github.com/actions/http-client/pull/27)

## 1.0.7
Update NPM dependencies and add 429 to the list of HttpCodes

## 1.0.6
Automatically sends Content-Type and Accept application/json headers for \<verb>Json() helper methods if not set in the client or parameters.

## 1.0.5
Adds \<verb>Json() helper methods for json over http scenarios.

## 1.0.4
Started to add \<verb>Json() helper methods. Do not use this release for that. Use >= 1.0.5 since there was an issue with types.

## 1.0.1 to 1.0.3
Adds proxy support.
BIN  node_modules/@actions/artifact/node_modules/@actions/http-client/actions.png  (generated) (vendored)
Binary file not shown. (Previous image size: 33 KiB)
23  node_modules/@actions/artifact/node_modules/@actions/http-client/auth.d.ts  (generated) (vendored)
@@ -1,23 +0,0 @@
import ifm = require('./interfaces');
export declare class BasicCredentialHandler implements ifm.IRequestHandler {
    username: string;
    password: string;
    constructor(username: string, password: string);
    prepareRequest(options: any): void;
    canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
    handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}
export declare class BearerCredentialHandler implements ifm.IRequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: any): void;
    canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
    handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}
export declare class PersonalAccessTokenCredentialHandler implements ifm.IRequestHandler {
    token: string;
    constructor(token: string);
    prepareRequest(options: any): void;
    canHandleAuthentication(response: ifm.IHttpClientResponse): boolean;
    handleAuthentication(httpClient: ifm.IHttpClient, requestInfo: ifm.IRequestInfo, objs: any): Promise<ifm.IHttpClientResponse>;
}
58  node_modules/@actions/artifact/node_modules/@actions/http-client/auth.js  (generated) (vendored)
@@ -1,58 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' +
                Buffer.from(this.username + ':' + this.password).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] = 'Bearer ' + this.token;
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
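Note: these deleted handlers are thin; each one's `prepareRequest()` simply stamps an `Authorization` header onto the outgoing request options, and the 401-challenge path is unimplemented (`canHandleAuthentication()` always returns false). A standalone TypeScript sketch of the same behaviour, not an import of the vendored module:

```typescript
import http = require('http');

// Mirrors BearerCredentialHandler.prepareRequest from the file above.
class BearerAuthSketch {
  constructor(private token: string) {}

  prepareRequest(options: http.RequestOptions): void {
    // Guarding headers is an addition for the sketch; the vendored code assumes they exist.
    options.headers = options.headers || {};
    options.headers['Authorization'] = 'Bearer ' + this.token;
  }
}

const options: http.RequestOptions = { headers: {} };
new BearerAuthSketch('example-token').prepareRequest(options); // hypothetical token
console.log(options.headers); // { Authorization: 'Bearer example-token' }
```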
124  node_modules/@actions/artifact/node_modules/@actions/http-client/index.d.ts  (generated) (vendored)
@@ -1,124 +0,0 @@
|
||||
/// <reference types="node" />
|
||||
import http = require('http');
|
||||
import ifm = require('./interfaces');
|
||||
export declare enum HttpCodes {
|
||||
OK = 200,
|
||||
MultipleChoices = 300,
|
||||
MovedPermanently = 301,
|
||||
ResourceMoved = 302,
|
||||
SeeOther = 303,
|
||||
NotModified = 304,
|
||||
UseProxy = 305,
|
||||
SwitchProxy = 306,
|
||||
TemporaryRedirect = 307,
|
||||
PermanentRedirect = 308,
|
||||
BadRequest = 400,
|
||||
Unauthorized = 401,
|
||||
PaymentRequired = 402,
|
||||
Forbidden = 403,
|
||||
NotFound = 404,
|
||||
MethodNotAllowed = 405,
|
||||
NotAcceptable = 406,
|
||||
ProxyAuthenticationRequired = 407,
|
||||
RequestTimeout = 408,
|
||||
Conflict = 409,
|
||||
Gone = 410,
|
||||
TooManyRequests = 429,
|
||||
InternalServerError = 500,
|
||||
NotImplemented = 501,
|
||||
BadGateway = 502,
|
||||
ServiceUnavailable = 503,
|
||||
GatewayTimeout = 504
|
||||
}
|
||||
export declare enum Headers {
|
||||
Accept = "accept",
|
||||
ContentType = "content-type"
|
||||
}
|
||||
export declare enum MediaTypes {
|
||||
ApplicationJson = "application/json"
|
||||
}
|
||||
/**
|
||||
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
export declare function getProxyUrl(serverUrl: string): string;
|
||||
export declare class HttpClientError extends Error {
|
||||
constructor(message: string, statusCode: number);
|
||||
statusCode: number;
|
||||
result?: any;
|
||||
}
|
||||
export declare class HttpClientResponse implements ifm.IHttpClientResponse {
|
||||
constructor(message: http.IncomingMessage);
|
||||
message: http.IncomingMessage;
|
||||
readBody(): Promise<string>;
|
||||
}
|
||||
export declare function isHttps(requestUrl: string): boolean;
|
||||
export declare class HttpClient {
|
||||
userAgent: string | undefined;
|
||||
handlers: ifm.IRequestHandler[];
|
||||
requestOptions: ifm.IRequestOptions;
|
||||
private _ignoreSslError;
|
||||
private _socketTimeout;
|
||||
private _allowRedirects;
|
||||
private _allowRedirectDowngrade;
|
||||
private _maxRedirects;
|
||||
private _allowRetries;
|
||||
private _maxRetries;
|
||||
private _agent;
|
||||
private _proxyAgent;
|
||||
private _keepAlive;
|
||||
private _disposed;
|
||||
constructor(userAgent?: string, handlers?: ifm.IRequestHandler[], requestOptions?: ifm.IRequestOptions);
|
||||
options(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
get(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
del(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
post(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
patch(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
put(requestUrl: string, data: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
head(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
/**
|
||||
* Gets a typed object from an endpoint
|
||||
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
|
||||
*/
|
||||
getJson<T>(requestUrl: string, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||
postJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||
putJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||
patchJson<T>(requestUrl: string, obj: any, additionalHeaders?: ifm.IHeaders): Promise<ifm.ITypedResponse<T>>;
|
||||
/**
|
||||
* Makes a raw http request.
|
||||
* All other methods such as get, post, patch, and request ultimately call this.
|
||||
* Prefer get, del, post and patch
|
||||
*/
|
||||
request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: ifm.IHeaders): Promise<ifm.IHttpClientResponse>;
|
||||
/**
|
||||
* Needs to be called if keepAlive is set to true in request options.
|
||||
*/
|
||||
dispose(): void;
|
||||
/**
|
||||
* Raw request.
|
||||
* @param info
|
||||
* @param data
|
||||
*/
|
||||
requestRaw(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream): Promise<ifm.IHttpClientResponse>;
|
||||
/**
|
||||
* Raw request with callback.
|
||||
* @param info
|
||||
* @param data
|
||||
* @param onResult
|
||||
*/
|
||||
requestRawWithCallback(info: ifm.IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: ifm.IHttpClientResponse) => void): void;
|
||||
/**
|
||||
* Gets an http agent. This function is useful when you need an http agent that handles
|
||||
* routing through a proxy server - depending upon the url and proxy environment variables.
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
getAgent(serverUrl: string): http.Agent;
|
||||
private _prepareRequest;
|
||||
private _mergeHeaders;
|
||||
private _getExistingOrDefaultHeader;
|
||||
private _getAgent;
|
||||
private _performExponentialBackoff;
|
||||
private static dateTimeDeserializer;
|
||||
private _processResponse;
|
||||
}
|
||||
537  node_modules/@actions/artifact/node_modules/@actions/http-client/index.js  (generated) (vendored)
@@ -1,537 +0,0 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const http = require("http");
|
||||
const https = require("https");
|
||||
const pm = require("./proxy");
|
||||
let tunnel;
|
||||
var HttpCodes;
|
||||
(function (HttpCodes) {
|
||||
HttpCodes[HttpCodes["OK"] = 200] = "OK";
|
||||
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
|
||||
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
|
||||
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
|
||||
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
|
||||
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
|
||||
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
|
||||
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
|
||||
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
|
||||
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
|
||||
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
|
||||
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
|
||||
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
|
||||
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
|
||||
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
|
||||
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
|
||||
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
|
||||
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
|
||||
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
|
||||
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
|
||||
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
|
||||
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
|
||||
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
|
||||
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
|
||||
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
|
||||
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
|
||||
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
|
||||
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
|
||||
var Headers;
|
||||
(function (Headers) {
|
||||
Headers["Accept"] = "accept";
|
||||
Headers["ContentType"] = "content-type";
|
||||
})(Headers = exports.Headers || (exports.Headers = {}));
|
||||
var MediaTypes;
|
||||
(function (MediaTypes) {
|
||||
MediaTypes["ApplicationJson"] = "application/json";
|
||||
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
|
||||
/**
|
||||
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
function getProxyUrl(serverUrl) {
|
||||
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
||||
return proxyUrl ? proxyUrl.href : '';
|
||||
}
|
||||
exports.getProxyUrl = getProxyUrl;
|
||||
const HttpRedirectCodes = [
|
||||
HttpCodes.MovedPermanently,
|
||||
HttpCodes.ResourceMoved,
|
||||
HttpCodes.SeeOther,
|
||||
HttpCodes.TemporaryRedirect,
|
||||
HttpCodes.PermanentRedirect
|
||||
];
|
||||
const HttpResponseRetryCodes = [
|
||||
HttpCodes.BadGateway,
|
||||
HttpCodes.ServiceUnavailable,
|
||||
HttpCodes.GatewayTimeout
|
||||
];
|
||||
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
||||
const ExponentialBackoffCeiling = 10;
|
||||
const ExponentialBackoffTimeSlice = 5;
|
||||
class HttpClientError extends Error {
|
||||
constructor(message, statusCode) {
|
||||
super(message);
|
||||
this.name = 'HttpClientError';
|
||||
this.statusCode = statusCode;
|
||||
Object.setPrototypeOf(this, HttpClientError.prototype);
|
||||
}
|
||||
}
|
||||
exports.HttpClientError = HttpClientError;
|
||||
class HttpClientResponse {
|
||||
constructor(message) {
|
||||
this.message = message;
|
||||
}
|
||||
readBody() {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
let output = Buffer.alloc(0);
|
||||
this.message.on('data', (chunk) => {
|
||||
output = Buffer.concat([output, chunk]);
|
||||
});
|
||||
this.message.on('end', () => {
|
||||
resolve(output.toString());
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.HttpClientResponse = HttpClientResponse;
|
||||
function isHttps(requestUrl) {
|
||||
let parsedUrl = new URL(requestUrl);
|
||||
return parsedUrl.protocol === 'https:';
|
||||
}
|
||||
exports.isHttps = isHttps;
|
||||
class HttpClient {
|
||||
constructor(userAgent, handlers, requestOptions) {
|
||||
this._ignoreSslError = false;
|
||||
this._allowRedirects = true;
|
||||
this._allowRedirectDowngrade = false;
|
||||
this._maxRedirects = 50;
|
||||
this._allowRetries = false;
|
||||
this._maxRetries = 1;
|
||||
this._keepAlive = false;
|
||||
this._disposed = false;
|
||||
this.userAgent = userAgent;
|
||||
this.handlers = handlers || [];
|
||||
this.requestOptions = requestOptions;
|
||||
if (requestOptions) {
|
||||
if (requestOptions.ignoreSslError != null) {
|
||||
this._ignoreSslError = requestOptions.ignoreSslError;
|
||||
}
|
||||
this._socketTimeout = requestOptions.socketTimeout;
|
||||
if (requestOptions.allowRedirects != null) {
|
||||
this._allowRedirects = requestOptions.allowRedirects;
|
||||
}
|
||||
if (requestOptions.allowRedirectDowngrade != null) {
|
||||
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
|
||||
}
|
||||
if (requestOptions.maxRedirects != null) {
|
||||
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
|
||||
}
|
||||
if (requestOptions.keepAlive != null) {
|
||||
this._keepAlive = requestOptions.keepAlive;
|
||||
}
|
||||
if (requestOptions.allowRetries != null) {
|
||||
this._allowRetries = requestOptions.allowRetries;
|
||||
}
|
||||
if (requestOptions.maxRetries != null) {
|
||||
this._maxRetries = requestOptions.maxRetries;
|
||||
}
|
||||
}
|
||||
}
|
||||
options(requestUrl, additionalHeaders) {
|
||||
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
|
||||
}
|
||||
get(requestUrl, additionalHeaders) {
|
||||
return this.request('GET', requestUrl, null, additionalHeaders || {});
|
||||
}
|
||||
del(requestUrl, additionalHeaders) {
|
||||
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
|
||||
}
|
||||
post(requestUrl, data, additionalHeaders) {
|
||||
return this.request('POST', requestUrl, data, additionalHeaders || {});
|
||||
}
|
||||
patch(requestUrl, data, additionalHeaders) {
|
||||
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
|
||||
}
|
||||
put(requestUrl, data, additionalHeaders) {
|
||||
return this.request('PUT', requestUrl, data, additionalHeaders || {});
|
||||
}
|
||||
head(requestUrl, additionalHeaders) {
|
||||
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
|
||||
}
|
||||
sendStream(verb, requestUrl, stream, additionalHeaders) {
|
||||
return this.request(verb, requestUrl, stream, additionalHeaders);
|
||||
}
|
||||
/**
|
||||
* Gets a typed object from an endpoint
|
||||
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
|
||||
*/
|
||||
async getJson(requestUrl, additionalHeaders = {}) {
|
||||
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||
let res = await this.get(requestUrl, additionalHeaders);
|
||||
return this._processResponse(res, this.requestOptions);
|
||||
}
|
||||
async postJson(requestUrl, obj, additionalHeaders = {}) {
|
||||
let data = JSON.stringify(obj, null, 2);
|
||||
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||
let res = await this.post(requestUrl, data, additionalHeaders);
|
||||
return this._processResponse(res, this.requestOptions);
|
||||
}
|
||||
async putJson(requestUrl, obj, additionalHeaders = {}) {
|
||||
let data = JSON.stringify(obj, null, 2);
|
||||
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||
let res = await this.put(requestUrl, data, additionalHeaders);
|
||||
return this._processResponse(res, this.requestOptions);
|
||||
}
|
||||
async patchJson(requestUrl, obj, additionalHeaders = {}) {
|
||||
let data = JSON.stringify(obj, null, 2);
|
||||
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
|
||||
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
|
||||
let res = await this.patch(requestUrl, data, additionalHeaders);
|
||||
return this._processResponse(res, this.requestOptions);
|
||||
}
|
||||
/**
|
||||
* Makes a raw http request.
|
||||
* All other methods such as get, post, patch, and request ultimately call this.
|
||||
* Prefer get, del, post and patch
|
||||
*/
|
||||
async request(verb, requestUrl, data, headers) {
|
||||
if (this._disposed) {
|
||||
throw new Error('Client has already been disposed.');
|
||||
}
|
||||
let parsedUrl = new URL(requestUrl);
|
||||
let info = this._prepareRequest(verb, parsedUrl, headers);
|
||||
// Only perform retries on reads since writes may not be idempotent.
|
||||
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
|
||||
? this._maxRetries + 1
|
||||
: 1;
|
||||
let numTries = 0;
|
||||
let response;
|
||||
while (numTries < maxTries) {
|
||||
response = await this.requestRaw(info, data);
|
||||
// Check if it's an authentication challenge
|
||||
if (response &&
|
||||
response.message &&
|
||||
response.message.statusCode === HttpCodes.Unauthorized) {
|
||||
let authenticationHandler;
|
||||
for (let i = 0; i < this.handlers.length; i++) {
|
||||
if (this.handlers[i].canHandleAuthentication(response)) {
|
||||
authenticationHandler = this.handlers[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (authenticationHandler) {
|
||||
return authenticationHandler.handleAuthentication(this, info, data);
|
||||
}
|
||||
else {
|
||||
// We have received an unauthorized response but have no handlers to handle it.
|
||||
// Let the response return to the caller.
|
||||
return response;
|
||||
}
|
||||
}
|
||||
let redirectsRemaining = this._maxRedirects;
|
||||
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
|
||||
this._allowRedirects &&
|
||||
redirectsRemaining > 0) {
|
||||
const redirectUrl = response.message.headers['location'];
|
||||
if (!redirectUrl) {
|
||||
// if there's no location to redirect to, we won't
|
||||
break;
|
||||
}
|
||||
let parsedRedirectUrl = new URL(redirectUrl);
|
||||
if (parsedUrl.protocol == 'https:' &&
|
||||
parsedUrl.protocol != parsedRedirectUrl.protocol &&
|
||||
!this._allowRedirectDowngrade) {
|
||||
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
|
||||
}
|
||||
// we need to finish reading the response before reassigning response
|
||||
// which will leak the open socket.
|
||||
await response.readBody();
|
||||
// strip authorization header if redirected to a different hostname
|
||||
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
|
||||
for (let header in headers) {
|
||||
// header names are case insensitive
|
||||
if (header.toLowerCase() === 'authorization') {
|
||||
delete headers[header];
|
||||
}
|
||||
}
|
||||
}
|
||||
// let's make the request with the new redirectUrl
|
||||
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
|
||||
response = await this.requestRaw(info, data);
|
||||
redirectsRemaining--;
|
||||
}
|
||||
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
|
||||
// If not a retry code, return immediately instead of retrying
|
||||
return response;
|
||||
}
|
||||
numTries += 1;
|
||||
if (numTries < maxTries) {
|
||||
await response.readBody();
|
||||
await this._performExponentialBackoff(numTries);
|
||||
}
|
||||
}
|
||||
return response;
|
||||
}
|
||||
/**
|
||||
* Needs to be called if keepAlive is set to true in request options.
|
||||
*/
|
||||
dispose() {
|
||||
if (this._agent) {
|
||||
this._agent.destroy();
|
||||
}
|
||||
this._disposed = true;
|
||||
}
|
||||
/**
|
||||
* Raw request.
|
||||
* @param info
|
||||
* @param data
|
||||
*/
|
||||
requestRaw(info, data) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let callbackForResult = function (err, res) {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
resolve(res);
|
||||
};
|
||||
this.requestRawWithCallback(info, data, callbackForResult);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Raw request with callback.
|
||||
* @param info
|
||||
* @param data
|
||||
* @param onResult
|
||||
*/
|
||||
requestRawWithCallback(info, data, onResult) {
|
||||
let socket;
|
||||
if (typeof data === 'string') {
|
||||
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
|
||||
}
|
||||
let callbackCalled = false;
|
||||
let handleResult = (err, res) => {
|
||||
if (!callbackCalled) {
|
||||
callbackCalled = true;
|
||||
onResult(err, res);
|
||||
}
|
||||
};
|
||||
let req = info.httpModule.request(info.options, (msg) => {
|
||||
let res = new HttpClientResponse(msg);
|
||||
handleResult(null, res);
|
||||
});
|
||||
req.on('socket', sock => {
|
||||
socket = sock;
|
||||
});
|
||||
// If we ever get disconnected, we want the socket to timeout eventually
|
||||
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
|
||||
if (socket) {
|
||||
socket.end();
|
||||
}
|
||||
handleResult(new Error('Request timeout: ' + info.options.path), null);
|
||||
});
|
||||
req.on('error', function (err) {
|
||||
// err has statusCode property
|
||||
// res should have headers
|
||||
handleResult(err, null);
|
||||
});
|
||||
if (data && typeof data === 'string') {
|
||||
req.write(data, 'utf8');
|
||||
}
|
||||
if (data && typeof data !== 'string') {
|
||||
data.on('close', function () {
|
||||
req.end();
|
||||
});
|
||||
data.pipe(req);
|
||||
}
|
||||
else {
|
||||
req.end();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Gets an http agent. This function is useful when you need an http agent that handles
|
||||
* routing through a proxy server - depending upon the url and proxy environment variables.
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
getAgent(serverUrl) {
|
||||
let parsedUrl = new URL(serverUrl);
|
||||
return this._getAgent(parsedUrl);
|
||||
}
|
||||
_prepareRequest(method, requestUrl, headers) {
|
||||
const info = {};
|
||||
info.parsedUrl = requestUrl;
|
||||
const usingSsl = info.parsedUrl.protocol === 'https:';
|
||||
info.httpModule = usingSsl ? https : http;
|
||||
const defaultPort = usingSsl ? 443 : 80;
|
||||
info.options = {};
|
||||
info.options.host = info.parsedUrl.hostname;
|
||||
info.options.port = info.parsedUrl.port
|
||||
? parseInt(info.parsedUrl.port)
|
||||
: defaultPort;
|
||||
info.options.path =
|
||||
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
|
||||
info.options.method = method;
|
||||
info.options.headers = this._mergeHeaders(headers);
|
||||
if (this.userAgent != null) {
|
||||
info.options.headers['user-agent'] = this.userAgent;
|
||||
}
|
||||
info.options.agent = this._getAgent(info.parsedUrl);
|
||||
// gives handlers an opportunity to participate
|
||||
if (this.handlers) {
|
||||
this.handlers.forEach(handler => {
|
||||
handler.prepareRequest(info.options);
|
||||
});
|
||||
}
|
||||
return info;
|
||||
}
|
||||
_mergeHeaders(headers) {
|
||||
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||
if (this.requestOptions && this.requestOptions.headers) {
|
||||
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
|
||||
}
|
||||
return lowercaseKeys(headers || {});
|
||||
}
|
||||
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
|
||||
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
|
||||
let clientHeader;
|
||||
if (this.requestOptions && this.requestOptions.headers) {
|
||||
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
|
||||
}
|
||||
return additionalHeaders[header] || clientHeader || _default;
|
||||
}
|
||||
_getAgent(parsedUrl) {
|
||||
let agent;
|
||||
let proxyUrl = pm.getProxyUrl(parsedUrl);
|
||||
let useProxy = proxyUrl && proxyUrl.hostname;
|
||||
if (this._keepAlive && useProxy) {
|
||||
agent = this._proxyAgent;
|
||||
}
|
||||
if (this._keepAlive && !useProxy) {
|
||||
agent = this._agent;
|
||||
}
|
||||
// if agent is already assigned use that agent.
|
||||
if (!!agent) {
|
||||
return agent;
|
||||
}
|
||||
const usingSsl = parsedUrl.protocol === 'https:';
|
||||
let maxSockets = 100;
|
||||
if (!!this.requestOptions) {
|
||||
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
|
||||
}
|
||||
if (useProxy) {
|
||||
// If using proxy, need tunnel
|
||||
if (!tunnel) {
|
||||
tunnel = require('tunnel');
|
||||
}
|
||||
const agentOptions = {
|
||||
maxSockets: maxSockets,
|
||||
keepAlive: this._keepAlive,
|
||||
proxy: {
|
||||
...((proxyUrl.username || proxyUrl.password) && {
|
||||
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
|
||||
}),
|
||||
host: proxyUrl.hostname,
|
||||
port: proxyUrl.port
|
||||
}
|
||||
};
|
||||
let tunnelAgent;
|
||||
const overHttps = proxyUrl.protocol === 'https:';
|
||||
if (usingSsl) {
|
||||
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
|
||||
}
|
||||
else {
|
||||
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
|
||||
}
|
||||
agent = tunnelAgent(agentOptions);
|
||||
this._proxyAgent = agent;
|
||||
}
|
||||
// if reusing agent across request and tunneling agent isn't assigned create a new agent
|
||||
if (this._keepAlive && !agent) {
|
||||
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
|
||||
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
|
||||
this._agent = agent;
|
||||
}
|
||||
// if not using private agent and tunnel agent isn't setup then use global agent
|
||||
if (!agent) {
|
||||
agent = usingSsl ? https.globalAgent : http.globalAgent;
|
||||
}
|
||||
if (usingSsl && this._ignoreSslError) {
|
||||
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
|
||||
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
|
||||
// we have to cast it to any and change it directly
|
||||
agent.options = Object.assign(agent.options || {}, {
|
||||
rejectUnauthorized: false
|
||||
});
|
||||
}
|
||||
return agent;
|
||||
}
|
||||
_performExponentialBackoff(retryNumber) {
|
||||
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
|
||||
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
|
||||
return new Promise(resolve => setTimeout(() => resolve(), ms));
|
||||
}
|
||||
static dateTimeDeserializer(key, value) {
|
||||
if (typeof value === 'string') {
|
||||
let a = new Date(value);
|
||||
if (!isNaN(a.valueOf())) {
|
||||
return a;
|
||||
}
|
||||
}
|
||||
return value;
|
||||
}
|
||||
async _processResponse(res, options) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
const statusCode = res.message.statusCode;
|
||||
const response = {
|
||||
statusCode: statusCode,
|
||||
result: null,
|
||||
headers: {}
|
||||
};
|
||||
// not found leads to null obj returned
|
||||
if (statusCode == HttpCodes.NotFound) {
|
||||
resolve(response);
|
||||
}
|
||||
let obj;
|
||||
let contents;
|
||||
// get the result from the body
|
||||
try {
|
||||
contents = await res.readBody();
|
||||
if (contents && contents.length > 0) {
|
||||
if (options && options.deserializeDates) {
|
||||
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
|
||||
}
|
||||
else {
|
||||
obj = JSON.parse(contents);
|
||||
}
|
||||
response.result = obj;
|
||||
}
|
||||
response.headers = res.message.headers;
|
||||
}
|
||||
catch (err) {
|
||||
// Invalid resource (contents not json); leaving result obj null
|
||||
}
|
||||
// note that 3xx redirects are handled by the http layer.
|
||||
if (statusCode > 299) {
|
||||
let msg;
|
||||
// if exception/error in body, attempt to get better error
|
||||
if (obj && obj.message) {
|
||||
msg = obj.message;
|
||||
}
|
||||
else if (contents && contents.length > 0) {
|
||||
// it may be the case that the exception is in the body message as string
|
||||
msg = contents;
|
||||
}
|
||||
else {
|
||||
msg = 'Failed request: (' + statusCode + ')';
|
||||
}
|
||||
let err = new HttpClientError(msg, statusCode);
|
||||
err.result = response.result;
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
resolve(response);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
exports.HttpClient = HttpClient;
|
||||
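Note: the deleted index.js above is the client that implements redirect following, stripping of the Authorization header on cross-host redirects, and opt-in retries for idempotent verbs. As a usage illustration only, sketched against the constructor and option names visible in this diff rather than taken from the action's own code:

```typescript
import { HttpClient } from '@actions/http-client';

async function example(): Promise<void> {
  // userAgent, handlers and request options match the constructor and
  // IRequestOptions declarations in this diff; the values here are illustrative.
  const client = new HttpClient('example-agent', [], {
    allowRetries: true, // retries apply to OPTIONS/GET/DELETE/HEAD only
    maxRetries: 3,
  });
  // getJson resolves with { statusCode, result, headers }; a 404 resolves with
  // result: null, while other 4xx/5xx responses reject with HttpClientError.
  const res = await client.getJson<{ tag_name: string }>(
    'https://api.github.com/repos/github/codeql-action/releases/latest'
  );
  console.log(res.statusCode, res.result ? res.result.tag_name : null);
}

example().catch(err => console.error(err));
```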
49  node_modules/@actions/artifact/node_modules/@actions/http-client/interfaces.d.ts  (generated) (vendored)
@@ -1,49 +0,0 @@
/// <reference types="node" />
import http = require('http');
export interface IHeaders {
    [key: string]: any;
}
export interface IHttpClient {
    options(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    get(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    del(requestUrl: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    post(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    patch(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    put(requestUrl: string, data: string, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    sendStream(verb: string, requestUrl: string, stream: NodeJS.ReadableStream, additionalHeaders?: IHeaders): Promise<IHttpClientResponse>;
    request(verb: string, requestUrl: string, data: string | NodeJS.ReadableStream, headers: IHeaders): Promise<IHttpClientResponse>;
    requestRaw(info: IRequestInfo, data: string | NodeJS.ReadableStream): Promise<IHttpClientResponse>;
    requestRawWithCallback(info: IRequestInfo, data: string | NodeJS.ReadableStream, onResult: (err: any, res: IHttpClientResponse) => void): void;
}
export interface IRequestHandler {
    prepareRequest(options: http.RequestOptions): void;
    canHandleAuthentication(response: IHttpClientResponse): boolean;
    handleAuthentication(httpClient: IHttpClient, requestInfo: IRequestInfo, objs: any): Promise<IHttpClientResponse>;
}
export interface IHttpClientResponse {
    message: http.IncomingMessage;
    readBody(): Promise<string>;
}
export interface IRequestInfo {
    options: http.RequestOptions;
    parsedUrl: URL;
    httpModule: any;
}
export interface IRequestOptions {
    headers?: IHeaders;
    socketTimeout?: number;
    ignoreSslError?: boolean;
    allowRedirects?: boolean;
    allowRedirectDowngrade?: boolean;
    maxRedirects?: number;
    maxSockets?: number;
    keepAlive?: boolean;
    deserializeDates?: boolean;
    allowRetries?: boolean;
    maxRetries?: number;
}
export interface ITypedResponse<T> {
    statusCode: number;
    result: T | null;
    headers: Object;
}
2  node_modules/@actions/artifact/node_modules/@actions/http-client/interfaces.js  (generated) (vendored)
@@ -1,2 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
Some files were not shown because too many files have changed in this diff.