Mirror of https://github.com/github/codeql-action.git (synced 2025-12-24 16:20:09 +08:00)
Compare commits
106 Commits
| SHA1 |
|---|
| 67a35a0858 |
| 57571ab0cd |
| 066b6343ef |
| aefd9896b1 |
| 3ca2260643 |
| 5f20b2c372 |
| 760583e70d |
| 0ef7eda548 |
| 86128131fa |
| ebbe965b43 |
| 3cbd063679 |
| b4fba292aa |
| b55762b0a6 |
| 91fb7b5c11 |
| 978bc50f9b |
| fb75ebd750 |
| a2527628e8 |
| 16b3d998b4 |
| 98a90dcce7 |
| d23060145b |
| bd8fec7f9f |
| 6a8522e57e |
| 168b99b3c2 |
| bc7318da91 |
| 433fe88bf3 |
| c208575433 |
| b8ea587211 |
| 65f42e3768 |
| d9ceda3823 |
| 19f00dc212 |
| ec298233c1 |
| a92a14621b |
| 204eadab9d |
| 0e50a19ce3 |
| 4775eab92e |
| 16964e90ba |
| 74cbab4958 |
| b36480d849 |
| 53f80edaf6 |
| b31d983f22 |
| 485cc11a24 |
| fc1366f6ec |
| e12a2ecd45 |
| d47d4c8047 |
| f13b180fb8 |
| a3cf96418e |
| 0c27d0da4a |
| 4b1f530308 |
| d98eadb536 |
| e4b846c482 |
| c310f094dd |
| 4366485427 |
| 8340258886 |
| 6ef6e50882 |
| eb40427b00 |
| 7806af3040 |
| abf1cea835 |
| e5ade42937 |
| 6f079be771 |
| 100bd7bbef |
| a6d3a44519 |
| 5e4af3a25d |
| e812e63bb6 |
| a589d4087e |
| 98d24e5629 |
| 903be79953 |
| 18ff14b615 |
| 36a249f5ae |
| 041757fc59 |
| 8f19113f88 |
| cf1855ae37 |
| 652709d1b9 |
| 32dc499307 |
| b742728ac2 |
| 237a258d2b |
| 5972e6d72e |
| 164027e682 |
| 736263f8fe |
| 3dde1f3512 |
| d7d7567b0e |
| 0e4e857bab |
| 08d1f21d4f |
| f3bd25eefa |
| 41f1810e52 |
| d87ad69338 |
| 8242edb8ed |
| 3095a09bb0 |
| e00cd12e3e |
| a25536bc80 |
| a2487fb969 |
| e187d074ed |
| 89c5165e5a |
| ba216f7d34 |
| 68f4f0d3bb |
| 12d9a244fa |
| 17573ee1cc |
| b6975b4b1a |
| b011dbdedf |
| 40babc141f |
| 5492b7d104 |
| 3c81243bb1 |
| e2f72f11e4 |
| 7ba5ed7eed |
| 21f3020df6 |
| b872c5adfd |
| bbe8d375fd |
.github/codeql/codeql-config.yml (vendored, 1 changed line)
@@ -7,6 +7,7 @@ queries:
# we include both even though one is a superset of the
# other, because we're testing the parsing logic and
# that the suites exist in the codeql bundle.
- uses: security-experimental
- uses: security-extended
- uses: security-and-quality
paths-ignore:

.github/dependabot.yml (vendored, 4 changed lines)
@@ -15,3 +15,7 @@ updates:
directory: "/"
schedule:
interval: weekly
- package-ecosystem: github-actions
directory: "/.github/setup-swift/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
schedule:
interval: weekly

.github/setup-swift/action.yml (vendored, 2 changed lines)
@@ -26,7 +26,7 @@ runs:
VERSION="5.7.0"
fi
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
- uses: swift-actions/setup-swift@da0e3e04b5e3e15dbc3861bd835ad9f0afe56296 # Please update the corresponding SHA in the CLI's CodeQL Action Integration Test.
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
with:
swift-version: "${{steps.get_swift_version.outputs.version}}"

.github/workflows/__analyze-ref-input.yml (generated, vendored, 3 changed lines)
@@ -74,7 +74,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -88,6 +88,7 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
upload-database: false
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
env:

.github/workflows/__autobuild-action.yml (generated, vendored, 2 changed lines)
@@ -56,6 +56,8 @@ jobs:
CORECLR_PROFILER: ''
CORECLR_PROFILER_PATH_64: ''
- uses: ./../action/analyze
with:
upload-database: false
- name: Check database
shell: bash
run: |

.github/workflows/__config-export.yml (generated, vendored, new file, 95 lines)
@@ -0,0 +1,95 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Config export
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
- main
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
config-export:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Config export
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
with:
languages: javascript
queries: security-extended
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check config properties appear in SARIF
uses: actions/github-script@v6
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
with:
script: |
const fs = require('fs');

const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
const run = sarif.runs[0];
const configSummary = run.properties.codeqlConfigSummary;

if (configSummary === undefined) {
core.setFailed('`codeqlConfigSummary` property not found in the SARIF run property bag.');
}
if (configSummary.disableDefaultQueries !== false) {
core.setFailed('`disableDefaultQueries` property incorrect: expected false, got ' +
`${JSON.stringify(configSummary.disableDefaultQueries)}.`);
}
const expectedQueries = [{ type: 'builtinSuite', uses: 'security-extended' }];
// Use JSON.stringify to deep-equal the arrays.
if (JSON.stringify(configSummary.queries) !== JSON.stringify(expectedQueries)) {
core.setFailed(`\`queries\` property incorrect: expected ${JSON.stringify(expectedQueries)}, got ` +
`${JSON.stringify(configSummary.queries)}.`);
}
core.info('Finished config export tests.');
env:
CODEQL_ACTION_EXPORT_CODE_SCANNING_CONFIG: true
CODEQL_PASS_CONFIG_TO_CLI: true
CODEQL_ACTION_TEST_MODE: true

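For orientation, the github-script check above implies that the exported config summary has roughly the following shape inside the SARIF run property bag. This is a sketch reconstructed only from the assertions in that script (it is not a full SARIF document, and other fields may be present):

```yaml
# Hypothetical excerpt of javascript.sarif, limited to the fields the check asserts on
runs:
  - properties:
      codeqlConfigSummary:
        disableDefaultQueries: false
        queries:
          - type: builtinSuite
            uses: security-extended
```
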
.github/workflows/__diagnostics-export.yml (generated, vendored, new file, 99 lines)
@@ -0,0 +1,99 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pip install ruamel.yaml && python3 sync.py
# to regenerate this file.

name: PR Check - Diagnostic export
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
- main
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
diagnostics-export:
strategy:
matrix:
include:
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
- os: windows-latest
version: nightly-latest
name: Diagnostic export
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: ./../action/init
id: init
with:
languages: javascript
queries: security-extended
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Manually add a diagnostic
shell: bash
env:
CODEQL_PATH: ${{ steps.init.outputs.codeql-path }}
run: |
"$CODEQL_PATH" database add-diagnostic "$RUNNER_TEMP/codeql_databases/javascript" --plaintext-message="Plaintext message" --source-id="lang/diagnostics/example" --source-name="Diagnostic name"
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v3
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Check diagnostics appear in SARIF
uses: actions/github-script@v6
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
with:
script: |-
const fs = require('fs');

const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
const run = sarif.runs[0];

const toolExecutionNotifications = run.invocations[0].toolExecutionNotifications;
const diagnosticToolExecutionNotification = toolExecutionNotifications.filter(n => n.descriptor.id === 'lang/diagnostics/example' && n.message.text === 'Plaintext message');
if (diagnosticToolExecutionNotification.length !== 1) {
core.setFailed(`Expected exactly 1 entry for this diagnostic in the 'runs[].invocations[].toolExecutionNotifications[]' SARIF property, found ${diagnosticToolExecutionNotification.length}`);
}

const notifications = run.tool.driver.notifications;
const diagnosticNotification = notifications.filter(n => n.id === 'lang/diagnostics/example' && n.name === 'lang/diagnostics/example' && n.fullDescription.text && 'Diagnostic name');
if (diagnosticNotification.length !== 1) {
core.setFailed(`Expected exactly 1 entry for this diagnostic in the 'runs[].tool.driver.notifications[]' SARIF property, found ${diagnosticNotification.length}`);
}

core.info('Finished diagnostic export test');
env:
CODEQL_ACTION_EXPORT_DIAGNOSTICS: true
CODEQL_ACTION_TEST_MODE: true

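The two SARIF locations inspected by the check above would contain entries roughly like the following. This is a sketch based only on the filter expressions in the script, not on an actual SARIF file produced by the run:

```yaml
# Hypothetical excerpt of javascript.sarif showing where the manually added diagnostic surfaces
runs:
  - invocations:
      - toolExecutionNotifications:
          - descriptor:
              id: lang/diagnostics/example
            message:
              text: Plaintext message
    tool:
      driver:
        notifications:
          - id: lang/diagnostics/example
            name: lang/diagnostics/example
            fullDescription:
              text: Diagnostic name
```
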
.github/workflows/__go-custom-queries.yml (generated, vendored, 4 changed lines)
@@ -74,7 +74,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -86,6 +86,8 @@ jobs:
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
upload-database: false
env:
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__go-tracing-autobuilder.yml (generated, vendored, 4 changed lines)
@@ -62,7 +62,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -71,6 +71,8 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
- uses: ./../action/analyze
with:
upload-database: false
- shell: bash
run: |
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then

.github/workflows/__go-tracing-custom-build-steps.yml (generated, vendored, 4 changed lines)
@@ -62,7 +62,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -73,6 +73,8 @@ jobs:
shell: bash
run: go build main.go
- uses: ./../action/analyze
with:
upload-database: false
- shell: bash
run: |
# Once we start running Bash 4.2 in all environments, we can replace the

.github/workflows/__go-tracing-legacy-workflow.yml (generated, vendored, 4 changed lines)
@@ -62,7 +62,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -70,6 +70,8 @@ jobs:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
upload-database: false
- shell: bash
run: |
cd "$RUNNER_TEMP/codeql_databases"

.github/workflows/__init-with-registries.yml (generated, vendored, 42 changed lines)
@@ -25,6 +25,18 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: windows-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: windows-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
@@ -75,5 +87,35 @@ jobs:
echo "::error $CODEQL_PACK1 pack was not installed."
exit 1
fi

- name: Verify qlconfig.yml file was created
shell: bash
run: |
QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
echo "Expected qlconfig.yml file to be created at $QLCONFIG_PATH"
if [[ -f $QLCONFIG_PATH ]]
then
echo "qlconfig.yml file was created."
else
echo "::error qlconfig.yml file was not created."
exit 1
fi

- name: Verify contents of qlconfig.yml
# yq is not available on windows
if: runner.os != 'Windows'
shell: bash
run: |
QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
cat $QLCONFIG_PATH | yq -e '.registries[] | select(.url == "https://ghcr.io/v2/") | select(.packages == "*/*")'
if [[ $? -eq 0 ]]
then
echo "Registry was added to qlconfig.yml file."
else
echo "::error Registry was not added to qlconfig.yml file."
echo "Contents of qlconfig.yml file:"
cat $QLCONFIG_PATH
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true

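For reference, a qlconfig.yml that satisfies the yq check in the step above would look roughly like this. This is a minimal sketch inferred from the yq expression; the real file is written by the init action and may contain additional registries and fields:

```yaml
# Hypothetical minimal qlconfig.yml matching the yq selector above
registries:
  - url: https://ghcr.io/v2/
    packages: "*/*"
```
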
.github/workflows/__javascript-source-root.yml (generated, vendored, 1 changed line)
@@ -54,6 +54,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
upload-database: false
skip-queries: true
upload: false
- name: Assert database exists

.github/workflows/__ml-powered-queries.yml (generated, vendored, 2 changed lines)
@@ -62,7 +62,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init

.github/workflows/__multi-language-autodetect.yml (generated, vendored, 4 changed lines)
@@ -62,7 +62,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -81,6 +81,8 @@ jobs:

- uses: ./../action/analyze
id: analysis
with:
upload-database: false

- name: Check language autodetect for all languages excluding Ruby, Swift
shell: bash

.github/workflows/__packaging-codescanning-config-inputs-js.yml (generated, vendored, 1 changed line)
@@ -66,6 +66,7 @@ jobs:
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false

- name: Check results
uses: ./../action/.github/check-sarif

.github/workflows/__packaging-config-inputs-js.yml (generated, vendored, 1 changed line)
@@ -66,6 +66,7 @@ jobs:
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false

- name: Check results
uses: ./../action/.github/check-sarif

.github/workflows/__packaging-config-js.yml (generated, vendored, 1 changed line)
@@ -65,6 +65,7 @@ jobs:
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
upload-database: false

- name: Check results
uses: ./../action/.github/check-sarif

.github/workflows/__remote-config.yml (generated, vendored, 2 changed lines)
@@ -74,7 +74,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init

.github/workflows/__ruby.yml (generated, vendored, 2 changed lines)
@@ -54,6 +54,8 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
id: analysis
with:
upload-database: false
- name: Check database
shell: bash
run: |

.github/workflows/__split-workflow.yml (generated, vendored, 1 changed line)
@@ -61,6 +61,7 @@ jobs:
with:
skip-queries: true
output: ${{ runner.temp }}/results
upload-database: false

- name: Assert No Results
shell: bash

.github/workflows/__swift-autobuild.yml (generated, vendored, 2 changed lines)
@@ -57,6 +57,8 @@ jobs:
timeout-minutes: 10
- uses: ./../action/analyze
id: analysis
with:
upload-database: false
- name: Check database
shell: bash
run: |

.github/workflows/__swift-custom-build.yml (generated, vendored, 2 changed lines)
@@ -64,6 +64,8 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
id: analysis
with:
upload-database: false
- name: Check database
shell: bash
run: |

.github/workflows/__test-autobuild-working-dir.yml (generated, vendored, 2 changed lines)
@@ -53,6 +53,8 @@ jobs:
with:
working-directory: autobuild-dir
- uses: ./../action/analyze
with:
upload-database: false
- name: Check database
shell: bash
run: |

.github/workflows/__test-local-codeql.yml (generated, vendored, 2 changed lines)
@@ -51,5 +51,7 @@ jobs:
shell: bash
run: ./build.sh
- uses: ./../action/analyze
with:
upload-database: false
env:
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__test-proxy.yml (generated, vendored, 2 changed lines)
@@ -43,6 +43,8 @@ jobs:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
upload-database: false
env:
https_proxy: http://squid-proxy:3128
CODEQL_ACTION_TEST_MODE: true

.github/workflows/__unset-environment.yml (generated, vendored, 4 changed lines)
@@ -50,7 +50,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -65,6 +65,8 @@ jobs:
./build.sh
- uses: ./../action/analyze
id: analysis
with:
upload-database: false
- shell: bash
run: |
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"

.github/workflows/__upload-ref-sha-input.yml (generated, vendored, 3 changed lines)
@@ -74,7 +74,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init
@@ -88,6 +88,7 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
upload-database: false
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
upload: false

.github/workflows/__with-checkout-path.yml (generated, vendored, 3 changed lines)
@@ -74,7 +74,7 @@ jobs:
version: ${{ matrix.version }}
- name: Set up Go
if: matrix.os == 'ubuntu-20.04' || matrix.os == 'windows-2019'
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: actions/checkout@v3
@@ -104,6 +104,7 @@ jobs:
ref: v1.1.0
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
upload: false
upload-database: false

- uses: ./../action/upload-sarif
with:

@@ -39,7 +39,7 @@ jobs:
uses: ./.github/prepare-test
with:
version: latest
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init

.github/workflows/debug-artifacts.yml (vendored, 2 changed lines)
@@ -59,7 +59,7 @@ jobs:
uses: ./.github/prepare-test
with:
version: ${{ matrix.version }}
- uses: actions/setup-go@v3
- uses: actions/setup-go@v4
with:
go-version: ^1.13.1
- uses: ./../action/init

.github/workflows/script/check-node-modules.sh (vendored, 10 changed lines)
@@ -7,13 +7,9 @@ if [ ! -z "$(git status --porcelain)" ]; then
>&2 echo "Failed: Repo should be clean before testing!"
exit 1
fi
# When updating this, make sure to update the npm version in
# `.github/workflows/update-dependencies.yml` too.
sudo npm install --force -g npm@9.2.0
# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths

"$(dirname "$0")/update-node-modules.sh" check-only

# Check that repo is still clean
if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention

.github/workflows/script/update-node-modules.sh (vendored, new executable file, 18 lines)
@@ -0,0 +1,18 @@
if [ "$1" != "update" && "$1" != "check-only" ]; then
>&2 echo "Failed: Invalid argument. Must be 'update' or 'check-only'"
exit 1
fi

sudo npm install --force -g npm@9.2.0

# clean the npm cache to ensure we don't have any files owned by root
sudo npm cache clean --force

if [ "$1" = "update" ]; then
npm install
fi

# Reinstall modules and then clean to remove absolute paths
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
npm ci
npm run removeNPMAbsolutePaths

.github/workflows/update-dependencies.yml (vendored, 7 changed lines)
@@ -27,12 +27,7 @@ jobs:
run: |
git fetch origin "$BRANCH" --depth=1
git checkout "origin/$BRANCH"
# When updating this, make sure to update the npm version in
# `.github/workflows/script/check-node-modules.sh` too.
sudo npm install --force -g npm@9.2.0
npm install
npm ci
npm run removeNPMAbsolutePaths
.github/workflows/script/update-node-modules.sh update
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"

@@ -3,6 +3,7 @@ name: Update Supported Enterprise Server Versions
on:
schedule:
- cron: "0 0 * * *"
workflow_dispatch:

jobs:
update-supported-enterprise-server-versions:
@@ -35,7 +36,7 @@ jobs:
env:
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
- name: Commit Changes
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
uses: peter-evans/create-pull-request@38e0b6e68b4c852a5500a94740f0e535e0d7ba54 # v4.2.4
with:
commit-message: Update supported GitHub Enterprise Server versions.
title: Update supported GitHub Enterprise Server versions.

@@ -15,6 +15,11 @@ def main():
api_compatibility_data = json.loads(_API_COMPATIBILITY_PATH.read_text())

releases = json.loads(_RELEASE_FILE_PATH.read_text())

# Remove GHES version using a previous version numbering scheme.
if "11.10.340" in releases:
del releases["11.10.340"]

oldest_supported_release = None
newest_supported_release = semver.VersionInfo.parse(api_compatibility_data["maximumVersion"] + ".0")

CHANGELOG.md (20 changed lines)
@@ -1,5 +1,25 @@
# CodeQL Action Changelog

## 2.2.8 - 22 Mar 2023

- Update default CodeQL bundle version to 2.12.5. [#1585](https://github.com/github/codeql-action/pull/1585)

## 2.2.7 - 15 Mar 2023

No user facing changes.

## 2.2.6 - 10 Mar 2023

- Update default CodeQL bundle version to 2.12.4. [#1561](https://github.com/github/codeql-action/pull/1561)

## 2.2.5 - 24 Feb 2023

- Update default CodeQL bundle version to 2.12.3. [#1543](https://github.com/github/codeql-action/pull/1543)

## 2.2.4 - 10 Feb 2023

No user facing changes.

## 2.2.3 - 08 Feb 2023

- Update default CodeQL bundle version to 2.12.2. [#1518](https://github.com/github/codeql-action/pull/1518)

@@ -67,12 +67,8 @@ Here are a few things you can do that will increase the likelihood of your pull
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.

Approve the mergeback PR and automerge it.
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.

Review the checklist items in the pull request description.
Once you've checked off all the items, approve the PR and automerge it.
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
Once the mergeback has been merged to `main`, the release is complete.

## Keeping the PR checks up to date (admin access required)

lib/analyze-action.js (generated, 4 changed lines)
@@ -103,7 +103,7 @@ function doesGoExtractionOutputExist(config) {
* an autobuild step or manual build steps.
*
* - We detect whether an autobuild step is present by checking the
* `util.DID_AUTOBUILD_GO_ENV_VAR_NAME` environment variable, which is set
* `CODEQL_ACTION_DID_AUTOBUILD_GOLANG` environment variable, which is set
* when the autobuilder is invoked.
* - We detect whether the Go database has already been finalized in case it
* has been manually set in a prior Action step.
@@ -114,7 +114,7 @@ async function runAutobuildIfLegacyGoWorkflow(config, logger) {
if (!config.languages.includes(languages_1.Language.go)) {
return;
}
if (process.env[util.DID_AUTOBUILD_GO_ENV_VAR_NAME] === "true") {
if (process.env[shared_environment_1.CODEQL_ACTION_DID_AUTOBUILD_GOLANG] === "true") {
logger.debug("Won't run Go autobuild since it has already been run.");
return;
}

File diff suppressed because one or more lines are too long
lib/analyze.js (generated, 40 changed lines)
@@ -123,16 +123,17 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger)
};
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, features) {
const statusReport = {};
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
const queryFlags = [memoryFlag, threadsFlag];
await util.logCodeScanningConfigInCli(codeql, features, logger);
for (const language of config.languages) {
const queries = config.queries[language];
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
const packsWithVersion = config.packs[language] || [];
try {
if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
if (await util.useCodeScanningConfigInCli(codeql, features)) {
// If we are using the code scanning config in the CLI,
// much of the work needed to generate the query suites
// is done in the CLI. We just need to make a single
@@ -140,7 +141,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
// another to interpret the results.
logger.startGroup(`Running queries for ${language}`);
const startTimeBuiltIn = new Date().getTime();
await runQueryGroup(language, "all", undefined, undefined);
await runQueryGroup(language, "all", undefined, undefined, true);
// TODO should not be using `builtin` here. We should be using `all` instead.
// The status report does not support `all` yet.
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
@@ -164,24 +165,29 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
!hasPackWithCustomQueries) {
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
}
const customQueryIndices = [];
for (let i = 0; i < queries.custom.length; ++i) {
if (queries.custom[i].queries.length > 0) {
customQueryIndices.push(i);
}
}
logger.startGroup(`Running queries for ${language}`);
const querySuitePaths = [];
if (queries["builtin"].length > 0) {
if (queries.builtin.length > 0) {
const startTimeBuiltIn = new Date().getTime();
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries.builtin, queryFilters), undefined, customQueryIndices.length === 0 && packsWithVersion.length === 0)));
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
new Date().getTime() - startTimeBuiltIn;
}
const startTimeCustom = new Date().getTime();
let ranCustom = false;
for (let i = 0; i < queries["custom"].length; ++i) {
if (queries["custom"][i].queries.length > 0) {
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
ranCustom = true;
}
for (const i of customQueryIndices) {
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries.custom[i].queries, queryFilters), queries.custom[i].searchPath, i === customQueryIndices[customQueryIndices.length - 1] &&
packsWithVersion.length === 0)));
ranCustom = true;
}
if (packsWithVersion.length > 0) {
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters, true));
ranCustom = true;
}
if (ranCustom) {
@@ -212,13 +218,13 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
return statusReport;
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, config, features);
}
async function runPrintLinesOfCode(language) {
const databasePath = util.getCodeQLDatabasePath(config, language);
return await codeql.databasePrintBaseline(databasePath);
}
async function runQueryGroup(language, type, querySuiteContents, searchPath) {
async function runQueryGroup(language, type, querySuiteContents, searchPath, optimizeForLastQueryRun) {
const databasePath = util.getCodeQLDatabasePath(config, language);
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
@@ -229,11 +235,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
fs.writeFileSync(querySuitePath, querySuiteContents);
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
}
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, queryFlags, optimizeForLastQueryRun);
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
return querySuitePath;
}
async function runQueryPacks(language, type, packs, queryFilters) {
async function runQueryPacks(language, type, packs, queryFilters, optimizeForLastQueryRun) {
const databasePath = util.getCodeQLDatabasePath(config, language);
for (const pack of packs) {
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
@@ -243,7 +249,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
const querySuitePath = `${databasePath}-queries-${type}.qls`;
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, queryFlags, optimizeForLastQueryRun);
return querySuitePath;
}
}

File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated, 122 changed lines)
@@ -30,8 +30,10 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava"));
const yaml = __importStar(require("js-yaml"));
const sinon = __importStar(require("sinon"));
const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const testing_utils_1 = require("./testing-utils");
@@ -188,6 +190,126 @@ const util = __importStar(require("./util"));
}
}
});
function mockCodeQL() {
return {
getVersion: async () => "2.12.2",
databaseRunQueries: sinon.spy(),
databaseInterpretResults: async () => "",
databasePrintBaseline: async () => "",
};
}
function createBaseConfig(tmpDir) {
return {
languages: [],
queries: {},
pathsIgnore: [],
paths: [],
originalUserInput: {},
tempDir: "tempDir",
codeQLCmd: "",
gitHubVersion: {
type: util.GitHubVariant.DOTCOM,
},
dbLocation: path.resolve(tmpDir, "codeql_databases"),
packs: {},
debugMode: false,
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
augmentationProperties: {
injectedMlQueries: false,
packsInputCombines: false,
queriesInputCombines: false,
},
trapCaches: {},
trapCacheDownloadTime: 0,
};
}
function createQueryConfig(builtin, custom) {
return {
builtin,
custom: custom.map((c) => ({ searchPath: "/search", queries: [c] })),
};
}
async function runQueriesWithConfig(config, features) {
for (const language of config.languages) {
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
recursive: true,
});
}
return (0, analyze_1.runQueries)("sarif-folder", "--memFlag", "--addSnippetsFlag", "--threadsFlag", undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)(features));
}
function getDatabaseRunQueriesCalls(mock) {
return mock.databaseRunQueries.getCalls();
}
(0, ava_1.default)("optimizeForLastQueryRun for one language", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp];
config.queries.cpp = createQueryConfig(["foo.ql"], []);
await runQueriesWithConfig(config, []);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
});
});
(0, ava_1.default)("optimizeForLastQueryRun for two languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
config.queries.cpp = createQueryConfig(["foo.ql"], []);
config.queries.java = createQueryConfig(["bar.ql"], []);
await runQueriesWithConfig(config, []);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
});
});
(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
await runQueriesWithConfig(config, []);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, true, false, true]);
});
});
(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries and packs", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
config.packs.cpp = ["a/cpp-pack1@0.1.0"];
config.packs.java = ["b/java-pack1@0.2.0", "b/java-pack2@0.3.3"];
await runQueriesWithConfig(config, []);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, false, true, false, false, true]);
});
});
(0, ava_1.default)("optimizeForLastQueryRun for one language, CliConfigFileEnabled", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp];
await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
});
});
(0, ava_1.default)("optimizeForLastQueryRun for two languages, CliConfigFileEnabled", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
const codeql = mockCodeQL();
(0, codeql_1.setCodeQL)(codeql);
const config = createBaseConfig(tmpDir);
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
});
});
(0, ava_1.default)("validateQueryFilters", (t) => {
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));

File diff suppressed because one or more lines are too long
@@ -1 +1 @@
{ "maximumVersion": "3.8", "minimumVersion": "3.4" }
{ "maximumVersion": "3.9", "minimumVersion": "3.5" }

lib/autobuild-action.js (generated, 3 changed lines)
@@ -30,6 +30,7 @@ const autobuild_1 = require("./autobuild");
const configUtils = __importStar(require("./config-utils"));
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const shared_environment_1 = require("./shared-environment");
const util_1 = require("./util");
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
(0, util_1.initializeEnvironment)((0, actions_util_1.getActionVersion)());
@@ -68,7 +69,7 @@ async function run() {
currentLanguage = language;
await (0, autobuild_1.runAutobuild)(language, config, logger);
if (language === languages_1.Language.go) {
core.exportVariable(util_1.DID_AUTOBUILD_GO_ENV_VAR_NAME, "true");
core.exportVariable(shared_environment_1.CODEQL_ACTION_DID_AUTOBUILD_GOLANG, "true");
}
}
}

@@ -1 +1 @@
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAQwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,2CAAuC;AACvC,uCAA6C;AAC7C,iCAIgB;AAShB,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,oCAA6B,EAAE,MAAM,CAAC,CAAC;iBAC5D;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"autobuild-action.js","sourceRoot":"","sources":["../src/autobuild-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAQwB;AACxB,6CAAgD;AAChD,2CAAwE;AACxE,4DAA8C;AAC9C,2CAAuC;AACvC,uCAA6C;AAC7C,6DAA0E;AAC1E,iCAA0E;AAS1E,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;IAEb,IAAA,4BAAqB,EAAC,IAAA,+BAAgB,GAAE,CAAC,CAAC;IAE1C,MAAM,MAAM,GAAG,IAAA,+BAAgB,EAAC,KAAK,EAAE,eAAe,CAAC,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,IAAA,qCAAsB,EACnD,WAAW,EACX,MAAM,EACN,SAAS,EACT,KAAK,EAAE,OAAO,EACd,KAAK,EAAE,KAAK,CACb,CAAC;IACF,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAA,+BAAgB,EAAC,YAAY,CAAC,CAAC;AACvC,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAClC,IAAI,eAAe,GAAyB,SAAS,CAAC;IACtD,IAAI,SAAS,GAA2B,SAAS,CAAC;IAClD,IAAI;QACF,IACE,CAAC,CAAC,MAAM,IAAA,+BAAgB,EACtB,MAAM,IAAA,qCAAsB,EAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,CACjE,CAAC,EACF;YACA,OAAO;SACR;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,CAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;YACxB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,SAAS,GAAG,MAAM,IAAA,uCAA2B,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9D,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;YAC/D,IAAI,gBAAgB,EAAE;gBACpB,MAAM,CAAC,IAAI,CACT,6CAA6C,gBAAgB,EAAE,CAChE,CAAC;gBACF,OAAO,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC;aACjC;YACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;gBAChC,eAAe,GAAG,QAAQ,CAAC;gBAC3B,MAAM,IAAA,wBAAY,EAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC;gBAC7C,IAAI,QAAQ,KAAK,oBAAQ,CAAC,EAAE,EAAE;oBAC5B,IAAI,CAAC,cAAc,CAAC,uDAAkC,EAAE,MAAM,CAAC,CAAC;iBACjE;aACF;SACF;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CACZ,mIACE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CACvD,EAAE,CACH,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,yBAAyB,CAC7B,SAAS,EACT,SAAS,IAAI,EAAE,EACf,eAAe,EACf,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAC1D,CAAC;QACF,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,SAAS,IAAI,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,KAAK,EAAE,CAAC,CAAC;QACpD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
lib/codeql.js (generated, 84 changed lines)
@@ -23,13 +23,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
const yaml = __importStar(require("js-yaml"));
const actions_util_1 = require("./actions-util");
const config_utils_1 = require("./config-utils");
const error_matcher_1 = require("./error-matcher");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const setupCodeql = __importStar(require("./setup-codeql"));
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
@@ -98,6 +100,10 @@ exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
* Versions 2.11.1+ of the CodeQL Bundle include a `security-experimental` built-in query suite for each language.
*/
exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = "2.12.1";
/**
* Versions 2.12.4+ of the CodeQL CLI support the `--qlconfig-file` flag in calls to `database init`.
*/
exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";
/**
* Set up CodeQL CLI access.
*
@@ -183,6 +189,7 @@ function setCodeQL(partialCodeql) {
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
databaseExportDiagnostics: resolveFunction(partialCodeql, "databaseExportDiagnostics"),
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
};
return cachedCodeQL;
@@ -302,7 +309,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
...getExtraOptionsFromEnv(["database", "init"]),
]);
},
async databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger) {
async databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger) {
const extraArgs = config.languages.map((language) => `--language=${language}`);
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
extraArgs.push("--begin-tracing");
@@ -320,17 +327,21 @@ async function getCodeQLForCmd(cmd, checkVersion) {
extraArgs.push("--no-internal-use-lua-tracing");
}
}
// A config file is only generated if the CliConfigFileEnabled feature flag is enabled.
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement, logger);
// A code scanning config file is only generated if the CliConfigFileEnabled feature flag is enabled.
const codeScanningConfigFile = await generateCodeScanningConfig(codeql, config, features, logger);
// Only pass external repository token if a config file is going to be parsed by the CLI.
let externalRepositoryToken;
if (configLocation) {
extraArgs.push(`--codescanning-config=${configLocation}`);
if (codeScanningConfigFile) {
externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`);
if (externalRepositoryToken) {
extraArgs.push("--external-repository-token-stdin");
}
}
if (qlconfigFile !== undefined &&
(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_INIT_WITH_QLCONFIG))) {
extraArgs.push(`--qlconfig-file=${qlconfigFile}`);
}
await runTool(cmd, [
"database",
"init",
@@ -472,17 +483,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
throw new Error(`Unexpected output from codeql resolve queries: ${e}`);
}
},
async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, memoryFlag, threadsFlag) {
async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, flags, optimizeForLastQueryRun) {
const codeqlArgs = [
"database",
"run-queries",
memoryFlag,
threadsFlag,
...flags,
databasePath,
"--min-disk-free=1024",
"-v",
...getExtraOptionsFromEnv(["database", "run-queries"]),
];
if (optimizeForLastQueryRun &&
(await util.supportExpectDiscardedCache(this))) {
codeqlArgs.push("--expect-discarded-cache");
}
if (extraSearchPath !== undefined) {
codeqlArgs.push("--additional-packs", extraSearchPath);
}
@@ -491,7 +505,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
}
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
},
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, config, features) {
const codeqlArgs = [
"database",
"interpret-results",
@@ -503,6 +517,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"--print-diagnostics-summary",
"--print-metrics-summary",
"--sarif-group-rules-by-pack",
...(await getCodeScanningConfigExportArguments(config, this, features)),
...getExtraOptionsFromEnv(["database", "interpret-results"]),
];
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
@@ -513,6 +528,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_FILE_BASELINE_INFORMATION)) {
codeqlArgs.push("--sarif-add-baseline-file-info");
}
if (await features.getValue(feature_flags_1.Feature.ExportDiagnosticsEnabled, this)) {
codeqlArgs.push("--sarif-include-diagnostics");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);
@@ -595,12 +613,30 @@ async function getCodeQLForCmd(cmd, checkVersion) {
];
await new toolrunner.ToolRunner(cmd, args).exec();
},
async diagnosticsExport(sarifFile, automationDetailsId) {
async databaseExportDiagnostics(databasePath, sarifFile, automationDetailsId) {
const args = [
"database",
"export-diagnostics",
`${databasePath}`,
"--db-cluster",
"--format=sarif-latest",
`--output=${sarifFile}`,
"--sarif-include-diagnostics",
"-vvv",
...getExtraOptionsFromEnv(["diagnostics", "export"]),
];
if (automationDetailsId !== undefined) {
args.push("--sarif-category", automationDetailsId);
}
await new toolrunner.ToolRunner(cmd, args).exec();
},
async diagnosticsExport(sarifFile, automationDetailsId, config, features) {
const args = [
"diagnostics",
"export",
"--format=sarif-latest",
|
||||
`--output=${sarifFile}`,
|
||||
...(await getCodeScanningConfigExportArguments(config, this, features)),
|
||||
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||
];
|
||||
if (automationDetailsId !== undefined) {
|
||||
@@ -711,11 +747,11 @@ async function runTool(cmd, args = [], opts = {}) {
|
||||
* @param config The configuration to use.
|
||||
* @returns the path to the generated user configuration file.
|
||||
*/
|
||||
async function generateCodeScanningConfig(codeql, config, featureEnablement, logger) {
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
|
||||
async function generateCodeScanningConfig(codeql, config, features, logger) {
|
||||
if (!(await util.useCodeScanningConfigInCli(codeql, features))) {
|
||||
return;
|
||||
}
|
||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
||||
const codeScanningConfigFile = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
|
||||
// make a copy so we can modify it
|
||||
const augmentedConfig = cloneObject(config.originalUserInput);
|
||||
// Inject the queries from the input
|
||||
@@ -769,14 +805,28 @@ async function generateCodeScanningConfig(codeql, config, featureEnablement, log
|
||||
augmentedConfig.packs["javascript"].push(packString);
|
||||
}
|
||||
}
|
||||
logger.info(`Writing augmented user configuration file to ${configLocation}`);
|
||||
logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
|
||||
logger.startGroup("Augmented user configuration file contents");
|
||||
logger.info(yaml.dump(augmentedConfig));
|
||||
logger.endGroup();
|
||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
||||
return configLocation;
|
||||
fs.writeFileSync(codeScanningConfigFile, yaml.dump(augmentedConfig));
|
||||
return codeScanningConfigFile;
|
||||
}
|
||||
function cloneObject(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
/**
|
||||
* Gets arguments for passing the code scanning configuration file to interpretation commands like
|
||||
* `codeql database interpret-results` and `codeql database export-diagnostics`.
|
||||
*
|
||||
* Returns an empty list if a code scanning configuration file was not generated by the CLI.
|
||||
*/
|
||||
async function getCodeScanningConfigExportArguments(config, codeql, features) {
|
||||
const codeScanningConfigPath = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
|
||||
if (fs.existsSync(codeScanningConfigPath) &&
|
||||
(await features.getValue(feature_flags_1.Feature.ExportCodeScanningConfigEnabled, codeql))) {
|
||||
return ["--sarif-codescanning-config", codeScanningConfigPath];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
//# sourceMappingURL=codeql.js.map
|
||||
File diff suppressed because one or more lines are too long
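The lib/codeql.js hunks above thread a new `qlconfigFile` parameter through `databaseInitCluster` and gate each extra flag separately: `--codescanning-config` is passed only when `generateCodeScanningConfig` actually produced a file, and `--qlconfig-file` only when the CLI is at least `CODEQL_VERSION_INIT_WITH_QLCONFIG` (2.12.4). A minimal sketch of the new call shape follows; `config`, `features`, `logger`, and the qlconfig path are hypothetical stand-ins, not values taken from this comparison.

// Sketch only: `codeql` is an object produced by this module (e.g. via setCodeQL or
// getCodeQLForCmd); everything below it is an illustrative placeholder.
await codeql.databaseInitCluster(
    config,                             // parsed action configuration
    "",                                 // source root
    undefined,                          // process name (unused here)
    features,                           // CliConfigFileEnabled decides whether a config file is generated
    "/tmp/codeql-action/qlconfig.yml",  // forwarded as --qlconfig-file only on CLI >= 2.12.4
    logger);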
83 lib/codeql.test.js (generated)
@@ -424,7 +424,7 @@ for (const isBundleVersionInUrl of [true, false]) {
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||
@@ -433,7 +433,7 @@ for (const isBundleVersionInUrl of [true, false]) {
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||
@@ -452,11 +452,11 @@ for (const isBundleVersionInUrl of [true, false]) {
|
||||
packsInputCombines: false,
|
||||
},
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should NOT have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.falsy(configArg, "Should have injected a codescanning config");
|
||||
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
||||
});
|
||||
});
|
||||
// Test macro for ensuring different variants of injected augmented configurations
|
||||
@@ -474,7 +474,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
tempDir,
|
||||
augmentationProperties,
|
||||
};
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
|
||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
@@ -665,24 +665,67 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
queries: [],
|
||||
},
|
||||
}, {});
|
||||
(0, ava_1.default)("does not use injected config", async (t) => {
|
||||
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
|
||||
try {
|
||||
(0, ava_1.default)("does not pass a code scanning config or qlconfig file to the CLI when CLI config passing is disabled", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
// stubbed version doesn't matter. It just needs to be valid semver.
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should not have used a config file
|
||||
const hasConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.false(hasConfigArg, "Should NOT have injected a codescanning config");
|
||||
// should not have passed a qlconfig file
|
||||
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||
t.false(hasQlconfigArg, "Should NOT have passed a qlconfig file");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("passes a code scanning config AND qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used an config file
|
||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
||||
}
|
||||
finally {
|
||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||
}
|
||||
// should have used a config file
|
||||
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
|
||||
// should have passed a qlconfig file
|
||||
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("passes a code scanning config BUT NOT a qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.12.2");
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
// should have used a config file
|
||||
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||
t.true(hasCodeScanningConfigArg, "Should have injected a codescanning config");
|
||||
// should not have passed a qlconfig file
|
||||
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("does not pass a qlconfig to the CLI when it is undefined", async (t) => {
|
||||
await util.withTmpDir(async (tempDir) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon
|
||||
.stub(codeqlObject, "getVersion")
|
||||
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, // undefined qlconfigFile
|
||||
(0, logging_1.getRunnerLogger)(true));
|
||||
const args = runnerConstructorStub.firstCall.args[1];
|
||||
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||
@@ -690,7 +733,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||
});
|
||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||
@@ -699,7 +742,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||
// safeWhich throws because of the test CodeQL object.
|
||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||
});
|
||||
function stubToolRunnerConstructor() {
|
||||
|
||||
File diff suppressed because one or more lines are too long
113 lib/config-utils.js (generated)
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||
exports.getGeneratedCodeScanningConfigPath = exports.wrapEnvironment = exports.generateRegistries = exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const perf_hooks_1 = require("perf_hooks");
|
||||
@@ -141,7 +141,7 @@ const builtinSuites = [
|
||||
* Throws an error if suiteName is not a valid builtin suite.
|
||||
* May inject ML queries, and the return value will declare if this was done.
|
||||
*/
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureEnablement, configFile) {
|
||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, features, configFile) {
|
||||
let injectedMlQueries = false;
|
||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||
if (!found) {
|
||||
@@ -149,7 +149,7 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
}
|
||||
if (suiteName === "security-experimental" &&
|
||||
!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE))) {
|
||||
throw new Error(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
|
||||
throw new Error(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
|
||||
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE}. Please upgrade to CodeQL CLI version
|
||||
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE} or later.`);
|
||||
}
|
||||
@@ -165,7 +165,7 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
||||
found === "security-extended" ||
|
||||
found === "security-and-quality") &&
|
||||
!packs.javascript?.some(isMlPoweredJsQueriesPack) &&
|
||||
(await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
||||
(await features.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
||||
if (!packs.javascript) {
|
||||
packs.javascript = [];
|
||||
}
|
||||
@@ -240,7 +240,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
||||
*
|
||||
* @returns whether or not we injected ML queries into the packs
|
||||
*/
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile) {
|
||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, features, logger, configFile) {
|
||||
queryUses = queryUses.trim();
|
||||
if (queryUses === "") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
@@ -252,12 +252,12 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
||||
}
|
||||
// Check for one of the builtin suites
|
||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, features, configFile);
|
||||
}
|
||||
// Otherwise, must be a reference to another repo.
|
||||
// If config parsing is handled in CLI, then this repo will be downloaded
|
||||
// later by the CLI.
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||
}
|
||||
return false;
|
||||
@@ -503,13 +503,13 @@ async function getRawLanguages(languagesInput, repository, logger) {
|
||||
return { rawLanguages, autodetected };
|
||||
}
|
||||
exports.getRawLanguages = getRawLanguages;
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
|
||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, features, logger) {
|
||||
let injectedMlQueries = false;
|
||||
queriesInput = queriesInput.trim();
|
||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||
queriesInput = queriesInput.replace(/^\+/, "");
|
||||
for (const query of queriesInput.split(",")) {
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, features, logger);
|
||||
injectedMlQueries = injectedMlQueries || didInject;
|
||||
}
|
||||
return injectedMlQueries;
|
||||
@@ -527,7 +527,7 @@ function shouldAddConfigFileQueries(queriesInput) {
|
||||
/**
|
||||
* Get the default config for when the user has not supplied one.
|
||||
*/
|
||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
|
||||
const queries = {};
|
||||
for (const language of languages) {
|
||||
@@ -545,7 +545,7 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
|
||||
: {};
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, features, logger);
|
||||
}
|
||||
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
||||
return {
|
||||
@@ -581,7 +581,7 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg
|
||||
/**
|
||||
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||
let parsedYAML;
|
||||
if (isLocal(configFile)) {
|
||||
// Treat the config file as relative to the workspace
|
||||
@@ -629,7 +629,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
||||
// in the config file.
|
||||
if (rawQueriesInput) {
|
||||
augmentationProperties.injectedMlQueries =
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, features, logger);
|
||||
}
|
||||
if (shouldAddConfigFileQueries(rawQueriesInput) &&
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
@@ -641,7 +641,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
||||
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueriesMissingUses(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile);
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, features, logger, configFile);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
@@ -725,7 +725,7 @@ function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
|
||||
}
|
||||
const trimmedInput = queriesInputCombines
|
||||
? rawQueriesInput.trim().slice(1).trim()
|
||||
: rawQueriesInput?.trim();
|
||||
: rawQueriesInput?.trim() ?? "";
|
||||
if (queriesInputCombines && trimmedInput.length === 0) {
|
||||
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
}
|
||||
@@ -932,21 +932,21 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
||||
* This will parse the config from the user input if present, or generate
|
||||
* a default config. The parsed config is then stored to a known location.
|
||||
*/
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||
let config;
|
||||
// If no config file was provided create an empty one
|
||||
if (!configFile) {
|
||||
logger.debug("No configuration file was provided");
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
|
||||
}
|
||||
else {
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
|
||||
}
|
||||
// When using the codescanning config in the CLI, pack downloads
|
||||
// happen in the CLI during the `database init` command, so no need
|
||||
// to download them here.
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, features, logger);
|
||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
||||
// The list of queries should not be empty for any language. If it is then
|
||||
// it is a user configuration error.
|
||||
// This check occurs in the CLI when it parses the config file.
|
||||
@@ -959,8 +959,7 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
const registries = parseRegistries(registriesInput);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
||||
await downloadPacks(codeQL, config.languages, config.packs, apiDetails, registriesInput, config.tempDir, logger);
|
||||
}
|
||||
// Save the config so we can easily access it again in the future
|
||||
await saveConfig(config, logger);
|
||||
@@ -1056,21 +1055,9 @@ async function getConfig(tempDir, logger) {
|
||||
return JSON.parse(configString);
|
||||
}
|
||||
exports.getConfig = getConfig;
|
||||
async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
|
||||
let qlconfigFile;
|
||||
let registriesAuthTokens;
|
||||
if (registries) {
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
|
||||
throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
|
||||
}
|
||||
// generate a qlconfig.yml file to hold the registry configs.
|
||||
const qlconfig = createRegistriesBlock(registries);
|
||||
qlconfigFile = path.join(tmpDir, "qlconfig.yml");
|
||||
fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
|
||||
registriesAuthTokens = registries
|
||||
.map((registry) => `${registry.url}=${registry.token}`)
|
||||
.join(",");
|
||||
}
|
||||
async function downloadPacks(codeQL, languages, packs, apiDetails, registriesInput, tempDir, logger) {
|
||||
// This code path is only used when config parsing occurs in the Action.
|
||||
const { registriesAuthTokens, qlconfigFile } = await generateRegistries(registriesInput, codeQL, tempDir, logger);
|
||||
await wrapEnvironment({
|
||||
GITHUB_TOKEN: apiDetails.auth,
|
||||
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
||||
@@ -1098,6 +1085,48 @@ async function downloadPacks(codeQL, languages, packs, registries, apiDetails, t
|
||||
});
|
||||
}
|
||||
exports.downloadPacks = downloadPacks;
|
||||
/**
|
||||
* Generate a `qlconfig.yml` file from the `registries` input.
|
||||
* This file is used by the CodeQL CLI to list the registries to use for each
|
||||
* pack.
|
||||
*
|
||||
* @param registriesInput The value of the `registries` input.
|
||||
* @param codeQL a codeQL object, used only for checking the version of CodeQL.
|
||||
* @param tempDir a temporary directory to store the generated qlconfig.yml file.
|
||||
* @param logger a logger object.
|
||||
* @returns The path to the generated `qlconfig.yml` file and the auth tokens to
|
||||
* use for each registry.
|
||||
*/
|
||||
async function generateRegistries(registriesInput, codeQL, tempDir, logger) {
|
||||
const registries = parseRegistries(registriesInput);
|
||||
let registriesAuthTokens;
|
||||
let qlconfigFile;
|
||||
if (registries) {
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
|
||||
throw new Error(`The 'registries' input is not supported on CodeQL CLI versions earlier than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}. Please upgrade to CodeQL CLI version ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD} or later.`);
|
||||
}
|
||||
// generate a qlconfig.yml file to hold the registry configs.
|
||||
const qlconfig = createRegistriesBlock(registries);
|
||||
qlconfigFile = path.join(tempDir, "qlconfig.yml");
|
||||
const qlconfigContents = yaml.dump(qlconfig);
|
||||
fs.writeFileSync(qlconfigFile, qlconfigContents, "utf8");
|
||||
logger.debug("Generated qlconfig.yml:");
|
||||
logger.debug(qlconfigContents);
|
||||
registriesAuthTokens = registries
|
||||
.map((registry) => `${registry.url}=${registry.token}`)
|
||||
.join(",");
|
||||
}
|
||||
if (typeof process.env.CODEQL_REGISTRIES_AUTH === "string") {
|
||||
logger.debug("Using CODEQL_REGISTRIES_AUTH environment variable to authenticate with registries.");
|
||||
}
|
||||
return {
|
||||
registriesAuthTokens:
|
||||
// if the user has explicitly set the CODEQL_REGISTRIES_AUTH env var then use that
|
||||
process.env.CODEQL_REGISTRIES_AUTH ?? registriesAuthTokens,
|
||||
qlconfigFile,
|
||||
};
|
||||
}
|
||||
exports.generateRegistries = generateRegistries;
|
||||
function createRegistriesBlock(registries) {
|
||||
if (!Array.isArray(registries) ||
|
||||
registries.some((r) => !r.url || !r.packages)) {
|
||||
@@ -1147,4 +1176,14 @@ async function wrapEnvironment(env, operation) {
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.wrapEnvironment = wrapEnvironment;
|
||||
/**
|
||||
* Get the path to the code scanning configuration generated by the CLI.
|
||||
*
|
||||
* This will not exist if the configuration is being parsed in the Action.
|
||||
*/
|
||||
function getGeneratedCodeScanningConfigPath(config) {
|
||||
return path.resolve(config.tempDir, "user-config.yaml");
|
||||
}
|
||||
exports.getGeneratedCodeScanningConfigPath = getGeneratedCodeScanningConfigPath;
|
||||
//# sourceMappingURL=config-utils.js.map
|
||||
File diff suppressed because one or more lines are too long
80 lib/config-utils.test.js (generated)
@@ -1114,8 +1114,8 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
java: ["a", "b"],
|
||||
go: ["c", "d"],
|
||||
python: ["e", "f"],
|
||||
}, undefined, // registries
|
||||
sampleApiDetails, tmpDir, logger);
|
||||
}, sampleApiDetails, undefined, // registriesAuthTokens
|
||||
tmpDir, logger);
|
||||
// Expecting packs to be downloaded once for java and once for python
|
||||
t.deepEqual(packDownloadStub.callCount, 2);
|
||||
// no config file was created, so pass `undefined` as the config file path
|
||||
@@ -1128,9 +1128,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
// associated env vars
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env.GITHUB_TOKEN = "not-a-token";
|
||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
||||
process.env.CODEQL_REGISTRIES_AUTH = undefined;
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const registries = [
|
||||
const registriesInput = yaml.dump([
|
||||
{
|
||||
// no slash
|
||||
url: "http://ghcr.io",
|
||||
@@ -1143,8 +1143,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
packages: "semmle/*",
|
||||
token: "still-not-a-token",
|
||||
},
|
||||
];
|
||||
]);
|
||||
// append a slash to the first url
|
||||
const registries = yaml.load(registriesInput);
|
||||
const expectedRegistries = registries.map((r, i) => ({
|
||||
packages: r.packages,
|
||||
url: i === 0 ? `${r.url}/` : r.url,
|
||||
@@ -1173,7 +1174,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
java: ["a", "b"],
|
||||
go: ["c", "d"],
|
||||
python: ["e", "f"],
|
||||
}, registries, sampleApiDetails, tmpDir, logger);
|
||||
}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||
// Same packs are downloaded as in previous test
|
||||
t.deepEqual(packDownloadStub.callCount, 2);
|
||||
t.deepEqual(packDownloadStub.firstCall.args, [
|
||||
@@ -1186,7 +1187,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
]);
|
||||
// Verify that the env vars were unset.
|
||||
t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
|
||||
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "not-a-registries-auth");
|
||||
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
|
||||
@@ -1196,7 +1197,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
process.env.GITHUB_TOKEN = "not-a-token";
|
||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const registries = [
|
||||
const registriesInput = yaml.dump([
|
||||
{
|
||||
url: "http://ghcr.io",
|
||||
packages: ["codeql/*", "dsp-testing/*"],
|
||||
@@ -1207,12 +1208,12 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
packages: "semmle/*",
|
||||
token: "still-not-a-token",
|
||||
},
|
||||
];
|
||||
]);
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
getVersion: () => Promise.resolve("2.10.3"),
|
||||
});
|
||||
await t.throwsAsync(async () => {
|
||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
|
||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||
}, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
|
||||
});
|
||||
});
|
||||
@@ -1223,7 +1224,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
process.env.GITHUB_TOKEN = "not-a-token";
|
||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const registries = [
|
||||
const registriesInput = yaml.dump([
|
||||
{
|
||||
// missing url property
|
||||
packages: ["codeql/*", "dsp-testing/*"],
|
||||
@@ -1234,15 +1235,68 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
||||
packages: "semmle/*",
|
||||
token: "still-not-a-token",
|
||||
},
|
||||
];
|
||||
]);
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
getVersion: () => Promise.resolve("2.10.4"),
|
||||
});
|
||||
await t.throwsAsync(async () => {
|
||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
|
||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||
}, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
||||
});
|
||||
});
|
||||
// the happy path for generateRegistries is already tested in downloadPacks.
|
||||
// these following tests are for the error cases and when nothing is generated.
|
||||
(0, ava_1.default)("no generateRegistries when CLI is too old", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const registriesInput = yaml.dump([
|
||||
{
|
||||
// no slash
|
||||
url: "http://ghcr.io",
|
||||
packages: ["codeql/*", "dsp-testing/*"],
|
||||
token: "not-a-token",
|
||||
},
|
||||
]);
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
// Accepted CLI versions are 2.10.4 or higher
|
||||
getVersion: () => Promise.resolve("2.10.3"),
|
||||
});
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
await t.throwsAsync(async () => await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger), undefined, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("no generateRegistries when registries is undefined", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
const registriesInput = undefined;
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
// Accepted CLI versions are 2.10.4 or higher
|
||||
getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
|
||||
});
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
|
||||
t.is(registriesAuthTokens, undefined);
|
||||
t.is(qlconfigFile, undefined);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("generateRegistries prefers original CODEQL_REGISTRIES_AUTH", async (t) => {
|
||||
return await util.withTmpDir(async (tmpDir) => {
|
||||
process.env.CODEQL_REGISTRIES_AUTH = "original";
|
||||
const registriesInput = yaml.dump([
|
||||
{
|
||||
url: "http://ghcr.io",
|
||||
packages: ["codeql/*", "dsp-testing/*"],
|
||||
token: "not-a-token",
|
||||
},
|
||||
]);
|
||||
const codeQL = (0, codeql_1.setCodeQL)({
|
||||
// Accepted CLI versions are 2.10.4 or higher
|
||||
getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
|
||||
});
|
||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||
const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
|
||||
t.is(registriesAuthTokens, "original");
|
||||
t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml"));
|
||||
});
|
||||
});
|
||||
// getLanguages
|
||||
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||
// eslint-disable-next-line github/array-foreach
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-20230207",
"cliVersion": "2.12.2",
"priorBundleVersion": "codeql-bundle-20230120",
"priorCliVersion": "2.12.1"
"bundleVersion": "codeql-bundle-20230317",
"cliVersion": "2.12.5",
"priorBundleVersion": "codeql-bundle-20230304",
"priorCliVersion": "2.12.4"
}
67 lib/feature-flags.js (generated)
@@ -36,30 +36,41 @@ var Feature;
(function (Feature) {
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
Feature["ExportCodeScanningConfigEnabled"] = "export_code_scanning_config_enabled";
Feature["ExportDiagnosticsEnabled"] = "export_diagnostics_enabled";
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
})(Feature = exports.Feature || (exports.Feature = {}));
exports.featureConfig = {
[Feature.DisableKotlinAnalysisEnabled]: {
envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.CliConfigFileEnabled]: {
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
minimumVersion: "2.11.6",
defaultValue: true,
},
[Feature.ExportCodeScanningConfigEnabled]: {
envVar: "CODEQL_ACTION_EXPORT_CODE_SCANNING_CONFIG",
minimumVersion: "2.12.3",
defaultValue: false,
},
[Feature.ExportDiagnosticsEnabled]: {
envVar: "CODEQL_ACTION_EXPORT_DIAGNOSTICS",
minimumVersion: "2.12.4",
defaultValue: false,
},
[Feature.MlPoweredQueriesEnabled]: {
envVar: "CODEQL_ML_POWERED_QUERIES",
minimumVersion: "2.7.5",
},
[Feature.TrapCachingEnabled]: {
envVar: "CODEQL_TRAP_CACHING",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",
defaultValue: false,
},
};
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
@@ -70,6 +81,7 @@ exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
*/
class Features {
constructor(gitHubVersion, repositoryNwo, tempDir, logger) {
this.logger = logger;
this.gitHubFeatureFlags = new GitHubFeatureFlags(gitHubVersion, repositoryNwo, path.join(tempDir, exports.FEATURE_FLAGS_FILE_NAME), logger);
}
async getDefaultCliVersion(variant) {
@@ -94,21 +106,36 @@ class Features {
const envVar = (process.env[exports.featureConfig[feature].envVar] || "").toLocaleLowerCase();
// Do not use this feature if user explicitly disables it via an environment variable.
if (envVar === "false") {
this.logger.debug(`Feature ${feature} is disabled via the environment variable ${exports.featureConfig[feature].envVar}.`);
return false;
}
// Never use this feature if the CLI version explicitly can't support it.
const minimumVersion = exports.featureConfig[feature].minimumVersion;
if (codeql && minimumVersion) {
if (!(await util.codeQlVersionAbove(codeql, minimumVersion))) {
this.logger.debug(`Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum ` +
`version ${minimumVersion}.`);
return false;
}
else {
this.logger.debug(`CodeQL CLI version ${await codeql.getVersion()} is newer than the minimum ` +
`version ${minimumVersion} for feature ${feature}.`);
}
}
// Use this feature if user explicitly enables it via an environment variable.
if (envVar === "true") {
this.logger.debug(`Feature ${feature} is enabled via the environment variable ${exports.featureConfig[feature].envVar}.`);
return true;
}
// Ask the GitHub API if the feature is enabled.
return await this.gitHubFeatureFlags.getValue(feature);
const apiValue = await this.gitHubFeatureFlags.getValue(feature);
if (apiValue !== undefined) {
this.logger.debug(`Feature ${feature} is ${apiValue ? "enabled" : "disabled"} via the GitHub API.`);
return apiValue;
}
const defaultValue = exports.featureConfig[feature].defaultValue;
this.logger.debug(`Feature ${feature} is ${defaultValue ? "enabled" : "disabled"} due to its default value.`);
return defaultValue;
}
}
exports.Features = Features;
@@ -118,7 +145,7 @@ class GitHubFeatureFlags {
this.repositoryNwo = repositoryNwo;
this.featureFlagsFile = featureFlagsFile;
this.logger = logger;
/**/
this.hasAccessedRemoteFeatureFlags = false; // Not accessed by default.
}
getCliVersionFromFeatureFlag(f) {
if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) ||
@@ -139,7 +166,9 @@ class GitHubFeatureFlags {
const defaultDotComCliVersion = await this.getDefaultDotcomCliVersion();
return {
cliVersion: defaultDotComCliVersion.version,
toolsFeatureFlagsValid: defaultDotComCliVersion.toolsFeatureFlagsValid,
toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
? defaultDotComCliVersion.toolsFeatureFlagsValid
: undefined,
variant,
};
}
@@ -170,7 +199,9 @@ class GitHubFeatureFlags {
`shipped with the Action. This is ${defaults.cliVersion}.`);
return {
version: defaults.cliVersion,
toolsFeatureFlagsValid: false,
toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
? false
: undefined,
};
}
const maxCliVersion = enabledFeatureFlagCliVersions.reduce((maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, enabledFeatureFlagCliVersions[0]);
@@ -180,15 +211,15 @@ class GitHubFeatureFlags {
async getValue(feature) {
const response = await this.getAllFeatures();
if (response === undefined) {
this.logger.debug(`No feature flags API response for ${feature}, considering it disabled.`);
return false;
this.logger.debug(`No feature flags API response for ${feature}.`);
return undefined;
}
const featureEnablement = response[feature];
if (featureEnablement === undefined) {
this.logger.debug(`Feature '${feature}' undefined in API response, considering it disabled.`);
return false;
const features = response[feature];
if (features === undefined) {
this.logger.debug(`Feature '${feature}' undefined in API response.`);
return undefined;
}
return !!featureEnablement;
return !!features;
}
async getAllFeatures() {
// if we have an in memory cache, use that
@@ -237,6 +268,7 @@ class GitHubFeatureFlags {
// Do nothing when not running against github.com
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
this.logger.debug("Not running against github.com. Disabling all toggleable features.");
this.hasAccessedRemoteFeatureFlags = false;
return {};
}
try {
@@ -247,6 +279,7 @@ class GitHubFeatureFlags {
const remoteFlags = response.data;
this.logger.debug("Loaded the following default values for the feature flags from the Code Scanning API: " +
`${JSON.stringify(remoteFlags)}`);
this.hasAccessedRemoteFeatureFlags = true;
return remoteFlags;
}
catch (e) {
@@ -255,6 +288,7 @@ class GitHubFeatureFlags {
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
this.hasAccessedRemoteFeatureFlags = false;
return {};
}
else {
@@ -265,7 +299,6 @@ class GitHubFeatureFlags {
throw new Error(`Encountered an error while trying to determine feature enablement: ${e}`);
}
}
return {};
}
}
//# sourceMappingURL=feature-flags.js.map
File diff suppressed because one or more lines are too long
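The lib/feature-flags.js hunks above change `Features.getValue` so that a missing API response no longer means "disabled"; each feature instead falls back to its `defaultValue` from `featureConfig`. A rough sketch of the resolution order, using a hypothetical `features` instance and `codeql` object rather than anything from this comparison:

// Sketch of the fallback order implemented above, not a definitive API:
// 1. featureConfig[feature].envVar set to "false"            -> disabled
// 2. CLI older than featureConfig[feature].minimumVersion     -> disabled
// 3. env var set to "true"                                    -> enabled
// 4. value from the GitHub feature-flags API, if it responded
// 5. featureConfig[feature].defaultValue
const exportDiagnostics = await features.getValue(Feature.ExportDiagnosticsEnabled, codeql);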
80 lib/feature-flags.test.js (generated)
@@ -51,9 +51,9 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
||||
(0, ava_1.default)(`All features are disabled if running against ${variant.description}`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const loggedMessages = [];
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
|
||||
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
|
||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||
t.false(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||
t.deepEqual(await features.getValue(feature, includeCodeQlIfRequired(feature)), feature_flags_1.featureConfig[feature].defaultValue);
|
||||
}
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message ===
|
||||
@@ -61,33 +61,35 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
||||
});
|
||||
});
|
||||
}
|
||||
(0, ava_1.default)("API response missing", async (t) => {
|
||||
(0, ava_1.default)("API response missing and features use default value", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const loggedMessages = [];
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
|
||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
|
||||
t.assert((await features.getValue(feature, includeCodeQlIfRequired(feature))) ===
|
||||
feature_flags_1.featureConfig[feature].defaultValue);
|
||||
}
|
||||
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Features are disabled if they're not returned in API response", async (t) => {
|
||||
(0, ava_1.default)("Features use default value if they're not returned in API response", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const loggedMessages = [];
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
|
||||
t.assert((await features.getValue(feature, includeCodeQlIfRequired(feature))) ===
|
||||
feature_flags_1.featureConfig[feature].defaultValue);
|
||||
}
|
||||
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
|
||||
await t.throwsAsync(async () => featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.MlPoweredQueriesEnabled)), {
|
||||
await t.throwsAsync(async () => features.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.MlPoweredQueriesEnabled)), {
|
||||
message: "Encountered an error while trying to determine feature enablement: Error: some error message",
|
||||
});
|
||||
});
|
||||
@@ -95,7 +97,7 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
||||
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
(0, ava_1.default)(`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
// set all features to false except the one we're testing
|
||||
const expectedFeatureEnablement = {};
|
||||
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
||||
@@ -105,7 +107,7 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
// retrieve the values of the actual features
|
||||
const actualFeatureEnablement = {};
|
||||
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
||||
actualFeatureEnablement[f] = await featureEnablement.getValue(f, includeCodeQlIfRequired(f));
|
||||
actualFeatureEnablement[f] = await features.getValue(f, includeCodeQlIfRequired(f));
|
||||
}
|
||||
// All features should be false except the one we're testing
|
||||
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
||||
@@ -113,35 +115,35 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
});
|
||||
(0, ava_1.default)(`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(false);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
// feature should be disabled initially
|
||||
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||
t.assert(!(await features.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||
// set env var to true and check that the feature is now enabled
|
||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
||||
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||
t.assert(await features.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
// feature should be enabled initially
|
||||
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||
t.assert(await features.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||
// set env var to false and check that the feature is now disabled
|
||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
||||
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||
t.assert(!(await features.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||
});
|
||||
});
|
||||
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
||||
(0, ava_1.default)(`Getting feature '${feature} should throw if no codeql is provided`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
await t.throwsAsync(async () => featureEnablement.getValue(feature), {
|
||||
await t.throwsAsync(async () => features.getValue(feature), {
|
||||
message: `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`,
|
||||
});
|
||||
});
|
||||
@@ -150,24 +152,24 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
||||
(0, ava_1.default)(`Feature '${feature}' is disabled if the minimum CLI version is below ${feature_flags_1.featureConfig[feature].minimumVersion}`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
// feature should be disabled when an old CLI version is set
|
||||
let codeql = (0, testing_utils_1.mockCodeQLVersion)("2.0.0");
|
||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
||||
t.assert(!(await features.getValue(feature, codeql)));
|
||||
// even setting the env var to true should not enable the feature if
|
||||
// the minimum CLI version is not met
|
||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
||||
t.assert(!(await features.getValue(feature, codeql)));
|
||||
// feature should be enabled when a new CLI version is set
|
||||
// and env var is not set
|
||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "";
|
||||
codeql = (0, testing_utils_1.mockCodeQLVersion)(feature_flags_1.featureConfig[feature].minimumVersion);
|
||||
t.assert(await featureEnablement.getValue(feature, codeql));
|
||||
t.assert(await features.getValue(feature, codeql));
|
||||
// set env var to false and check that the feature is now disabled
|
||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
||||
t.assert(!(await features.getValue(feature, codeql)));
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -184,12 +186,12 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
});
|
||||
(0, ava_1.default)("Feature flags are saved to disk", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
||||
t.false(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should not exist before getting feature flags");
|
||||
t.true(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||
t.true(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
||||
const actualFeatureEnablement = JSON.parse(fs.readFileSync(cachedFeatureFlags, "utf8"));
|
||||
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
||||
@@ -197,20 +199,20 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
actualFeatureEnablement[feature_flags_1.Feature.CliConfigFileEnabled] = false;
|
||||
fs.writeFileSync(cachedFeatureFlags, JSON.stringify(actualFeatureEnablement));
|
||||
// delete the in memory cache so that we are forced to use the cached file
|
||||
featureEnablement.gitHubFeatureFlags.cachedApiResponse = undefined;
|
||||
t.false(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled after reading from cached file");
|
||||
features.gitHubFeatureFlags.cachedApiResponse = undefined;
|
||||
t.false(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled after reading from cached file");
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("Environment variable can override feature flag cache", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
||||
t.true(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||
t.true(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
||||
process.env.CODEQL_PASS_CONFIG_TO_CLI = "false";
|
||||
t.false(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be disabled after setting env var");
|
||||
t.false(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be disabled after setting env var");
|
||||
});
|
||||
});
|
||||
for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
@@ -228,7 +230,7 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
}
|
||||
(0, ava_1.default)("selects CLI v2.12.1 on Dotcom when feature flags enable v2.12.0 and v2.12.1", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||
@@ -237,7 +239,7 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_4_enabled"] = false;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_5_enabled"] = false;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
t.deepEqual(defaultCliVersion, {
|
||||
cliVersion: "2.12.1",
|
||||
toolsFeatureFlagsValid: true,
|
||||
@@ -247,10 +249,10 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
});
|
||||
(0, ava_1.default)(`selects CLI from defaults.json on Dotcom when no default version feature flags are enabled`, async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
||||
const features = setUpFeatureFlagTests(tmpDir);
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
t.deepEqual(defaultCliVersion, {
|
||||
cliVersion: defaults.cliVersion,
|
||||
toolsFeatureFlagsValid: false,
|
||||
@@ -261,14 +263,14 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||
(0, ava_1.default)("ignores invalid version numbers in default version feature flags", async (t) => {
|
||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||
const loggedMessages = [];
|
||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||
const expectedFeatureEnablement = initializeFeatures(true);
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||
expectedFeatureEnablement["default_codeql_version_2_12_invalid_enabled"] =
|
||||
true;
|
||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||
t.deepEqual(defaultCliVersion, {
|
||||
cliVersion: "2.12.1",
|
||||
toolsFeatureFlagsValid: true,
|
||||
@@ -283,7 +285,7 @@ function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
|
||||
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||
v.message.includes(feature) &&
|
||||
v.message.includes("considering it disabled")) !== undefined);
|
||||
v.message.includes("undefined in API response")) !== undefined);
|
||||
}
|
||||
}
|
||||
function initializeFeatures(initialValue) {
File diff suppressed because one or more lines are too long

23 lib/init-action-post-helper.js (generated)
@@ -43,12 +43,12 @@ function createFailedUploadFailedSarifResult(error) {
* Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF
* category for the workflow.
*/
async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) {
if (!config.codeQLCmd) {
return { upload_failed_run_skipped_because: "CodeQL command not found" };
}
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
if (!(await features.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
return { upload_failed_run_skipped_because: "Feature disabled" };
}
const workflow = await (0, workflow_1.getWorkflow)();
@@ -60,17 +60,26 @@ async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement,
}
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
const databasePath = config.dbLocation;
const sarifFile = "../codeql-failed-run.sarif";
await codeql.diagnosticsExport(sarifFile, category);
// If there is no database or the feature flag is off, we run 'export diagnostics'
if (databasePath === undefined ||
!(await features.getValue(feature_flags_1.Feature.ExportDiagnosticsEnabled, codeql))) {
await codeql.diagnosticsExport(sarifFile, category, config, features);
}
else {
// We call 'database export-diagnostics' to find any per-database diagnostics.
await codeql.databaseExportDiagnostics(databasePath, sarifFile, category);
}
core.info(`Uploading failed SARIF file ${sarifFile}`);
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
return uploadResult?.statusReport ?? {};
}
async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger) {
async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) {
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
try {
return await maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
return await maybeUploadFailedSarif(config, repositoryNwo, features, logger);
}
catch (e) {
logger.debug(`Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`);
@@ -84,13 +93,13 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablemen
}
}
exports.tryUploadSarifIfRunFailed = tryUploadSarifIfRunFailed;
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, features, logger) {
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
return;
}
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger);
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger);
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
`${uploadFailedSarifResult.upload_failed_run_skipped_because}.`);
@@ -1 +1 @@
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AApDD,kBAoDC"}
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAC5E,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAEvC,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAE/C,kFAAkF;IAClF,IACE,YAAY,KAAK,SAAS;QAC1B,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EACpE;QACA,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;KACvE;SAAM;QACL,8EAA8E;QAC9E,MAAM,MAAM,CAAC,yBAAyB,CAAC,YAAY,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC;KAC3E;IAED,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IAEF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AArDD,kBAqDC"}
73 lib/init-action-post-helper.test.js (generated)
@@ -84,7 +84,7 @@ const workflow = __importStar(require("./workflow"));
|
||||
t.assert(printDebugLogsSpy.called);
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploads failed SARIF run for typical workflow", async (t) => {
|
||||
(0, ava_1.default)("uploads failed SARIF run with `diagnostics export` if feature flag is off", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
@@ -107,6 +107,58 @@ const workflow = __importStar(require("./workflow"));
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||
});
|
||||
(0, ava_1.default)("uploads failed SARIF run with `diagnostics export` if the database doesn't exist", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
category: "my-category",
|
||||
databaseExists: false,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("uploads failed SARIF run with database export-diagnostics if the database exists and feature flag is on", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
name: "Checkout repository",
|
||||
uses: "actions/checkout@v3",
|
||||
},
|
||||
{
|
||||
name: "Initialize CodeQL",
|
||||
uses: "github/codeql-action/init@v2",
|
||||
with: {
|
||||
languages: "javascript",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Perform CodeQL Analysis",
|
||||
uses: "github/codeql-action/analyze@v2",
|
||||
with: {
|
||||
category: "my-category",
|
||||
},
|
||||
},
|
||||
]);
|
||||
await testFailedSarifUpload(t, actionsWorkflow, {
|
||||
category: "my-category",
|
||||
exportDiagnosticsEnabled: true,
|
||||
});
|
||||
});
|
||||
(0, ava_1.default)("doesn't upload failed SARIF for workflow with upload: false", async (t) => {
|
||||
const actionsWorkflow = createTestWorkflow([
|
||||
{
|
||||
@@ -221,13 +273,16 @@ function createTestWorkflow(steps) {
|
||||
},
|
||||
};
|
||||
}
|
||||
async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUpload = true, matrix = {}, } = {}) {
|
||||
async function testFailedSarifUpload(t, actionsWorkflow, { category, databaseExists = true, exportDiagnosticsEnabled = false, expectUpload = true, matrix = {}, } = {}) {
|
||||
const config = {
|
||||
codeQLCmd: "codeql",
|
||||
debugMode: true,
|
||||
languages: [],
|
||||
packs: [],
|
||||
};
|
||||
if (databaseExists) {
|
||||
config.dbLocation = "path/to/database";
|
||||
}
|
||||
process.env["GITHUB_JOB"] = "analyze";
|
||||
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||
process.env["GITHUB_WORKSPACE"] =
|
||||
@@ -238,6 +293,7 @@ async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUploa
|
||||
.returns(JSON.stringify(matrix));
|
||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
|
||||
const databaseExportDiagnosticsStub = sinon.stub(codeqlObject, "databaseExportDiagnostics");
|
||||
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
|
||||
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
|
||||
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
|
||||
@@ -246,7 +302,11 @@ async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUploa
|
||||
statusReport: { raw_upload_size_bytes: 20, zipped_upload_size_bytes: 10 },
|
||||
});
|
||||
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
|
||||
const result = await initActionPostHelper.tryUploadSarifIfRunFailed(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.UploadFailedSarifEnabled]), (0, logging_1.getRunnerLogger)(true));
|
||||
const features = [feature_flags_1.Feature.UploadFailedSarifEnabled];
|
||||
if (exportDiagnosticsEnabled) {
|
||||
features.push(feature_flags_1.Feature.ExportDiagnosticsEnabled);
|
||||
}
|
||||
const result = await initActionPostHelper.tryUploadSarifIfRunFailed(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)(features), (0, logging_1.getRunnerLogger)(true));
|
||||
if (expectUpload) {
|
||||
t.deepEqual(result, {
|
||||
raw_upload_size_bytes: 20,
|
||||
@@ -254,7 +314,12 @@ async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUploa
|
||||
});
|
||||
}
|
||||
if (expectUpload) {
|
||||
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
|
||||
if (databaseExists && exportDiagnosticsEnabled) {
|
||||
t.true(databaseExportDiagnosticsStub.calledOnceWith(config.dbLocation, sinon.match.string, category), `Actual args were: ${databaseExportDiagnosticsStub.args}`);
|
||||
}
|
||||
else {
|
||||
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category, config, sinon.match.any), `Actual args were: ${diagnosticsExportStub.args}`);
|
||||
}
|
||||
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
|
||||
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||
isUnsuccessfulExecution: true,
File diff suppressed because one or more lines are too long

24 lib/init-action.js (generated)
@@ -46,12 +46,13 @@ async function sendInitStatusReport(actionStatus, startedAt, config, toolsDownlo
|
||||
tools_source: toolsSource || init_1.ToolsSource.Unknown,
|
||||
workflow_languages: workflowLanguages || "",
|
||||
};
|
||||
let initToolsDownloadFields = {};
|
||||
if (toolsSource === init_1.ToolsSource.Download) {
|
||||
initToolsDownloadFields = {
|
||||
tools_download_duration_ms: toolsDownloadDurationMs,
|
||||
tools_feature_flags_valid: toolsFeatureFlagsValid,
|
||||
};
|
||||
const initToolsDownloadFields = {};
|
||||
if (toolsDownloadDurationMs !== undefined) {
|
||||
initToolsDownloadFields.tools_download_duration_ms =
|
||||
toolsDownloadDurationMs;
|
||||
}
|
||||
if (toolsFeatureFlagsValid !== undefined) {
|
||||
initToolsDownloadFields.tools_feature_flags_valid = toolsFeatureFlagsValid;
|
||||
}
|
||||
if (config !== undefined) {
|
||||
const languages = config.languages.join(",");
|
||||
@@ -112,6 +113,7 @@ async function run() {
|
||||
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
||||
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
|
||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||
const registriesInput = (0, actions_util_1.getOptionalInput)("registries");
|
||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||
try {
|
||||
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
||||
@@ -128,7 +130,7 @@ async function run() {
|
||||
toolsVersion = initCodeQLResult.toolsVersion;
|
||||
toolsSource = initCodeQLResult.toolsSource;
|
||||
await (0, util_1.enrichEnvironment)(codeql);
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("registries"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), await getTrapCachingEnabled(features),
|
||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), registriesInput, (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), getTrapCachingEnabled(),
|
||||
// Debug mode is enabled if:
|
||||
// - The `init` Action is passed `debug: true`.
|
||||
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
||||
@@ -172,7 +174,7 @@ async function run() {
|
||||
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
|
||||
}
|
||||
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", features, logger);
|
||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", registriesInput, features, apiDetails, logger);
|
||||
if (tracerConfig !== undefined) {
|
||||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
||||
core.exportVariable(key, value);
|
||||
@@ -192,7 +194,7 @@ async function run() {
|
||||
}
|
||||
await sendInitStatusReport("success", startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger);
|
||||
}
|
||||
async function getTrapCachingEnabled(featureEnablement) {
|
||||
function getTrapCachingEnabled() {
|
||||
// If the workflow specified something always respect that
|
||||
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
|
||||
if (trapCaching !== undefined)
|
||||
@@ -200,8 +202,8 @@ async function getTrapCachingEnabled(featureEnablement) {
|
||||
// On self-hosted runners which may have slow network access, disable TRAP caching by default
|
||||
if (!(0, util_1.isHostedRunner)())
|
||||
return false;
|
||||
// On hosted runners, respect the feature flag
|
||||
return await featureEnablement.getValue(feature_flags_1.Feature.TrapCachingEnabled);
|
||||
// On hosted runners, enable TRAP caching by default
|
||||
return true;
|
||||
}
|
||||
async function runWrapper() {
|
||||
try {
File diff suppressed because one or more lines are too long

22 lib/init.js (generated)
@@ -49,20 +49,34 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
return { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion };
}
exports.initCodeQL = initCodeQL;
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
logger.startGroup("Load language configuration");
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
analysisPaths.printPathFiltersWarning(config, logger);
logger.endGroup();
return config;
}
exports.initConfig = initConfig;
async function runInit(codeql, config, sourceRoot, processName, featureEnablement, logger) {
async function runInit(codeql, config, sourceRoot, processName, registriesInput, features, apiDetails, logger) {
fs.mkdirSync(config.dbLocation, { recursive: true });
try {
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// When parsing the codeql config in the CLI, we have not yet created the qlconfig file.
// So, create it now.
// If we are parsing the config file in the Action, then the qlconfig file was already created
// before the `pack download` command was invoked. It is not required for the init command.
let registriesAuthTokens;
let qlconfigFile;
if (await util.useCodeScanningConfigInCli(codeql, features)) {
({ registriesAuthTokens, qlconfigFile } =
await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
}
await configUtils.wrapEnvironment({
GITHUB_TOKEN: apiDetails.auth,
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
},
// Init a database cluster
await codeql.databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger);
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger));
}
else {
for (const language of config.languages) {
File diff suppressed because one or more lines are too long

7 lib/shared-environment.js (generated)
@@ -1,6 +1,11 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = void 0;
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY = exports.CODEQL_ACTION_DID_AUTOBUILD_GOLANG = void 0;
/**
* Environment variable that is set to true when the CodeQL Action has invoked
* the Go autobuilder.
*/
exports.CODEQL_ACTION_DID_AUTOBUILD_GOLANG = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG";
/**
* This environment variable is set to true when the `analyze` Action
* completes successfully.
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACU,QAAA,+CAA+C,GAC1D,iDAAiD,CAAC;AAEvC,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;GAGG;AACU,QAAA,kCAAkC,GAC7C,oCAAoC,CAAC;AAEvC;;;GAGG;AACU,QAAA,+CAA+C,GAC1D,iDAAiD,CAAC;AAEvC,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
4 lib/upload-lib.js (generated)
@@ -330,7 +330,9 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
else {
util.assertNever(status);
}
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS, {
allowProcessExit: false,
});
}
}
finally {
File diff suppressed because one or more lines are too long

48 lib/util.js (generated)
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
@@ -58,11 +58,6 @@ exports.DEFAULT_DEBUG_ARTIFACT_NAME = "debug-artifacts";
|
||||
* Default name of the database in the debugging artifact.
|
||||
*/
|
||||
exports.DEFAULT_DEBUG_DATABASE_NAME = "db";
|
||||
/**
|
||||
* Environment variable that is set to "true" when the CodeQL Action has invoked
|
||||
* the Go autobuilder.
|
||||
*/
|
||||
exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG";
|
||||
/**
|
||||
* Get the extra options for the codeql commands.
|
||||
*/
|
||||
@@ -265,6 +260,7 @@ var GitHubVariant;
|
||||
GitHubVariant[GitHubVariant["DOTCOM"] = 0] = "DOTCOM";
|
||||
GitHubVariant[GitHubVariant["GHES"] = 1] = "GHES";
|
||||
GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
|
||||
GitHubVariant[GitHubVariant["GHE_DOTCOM"] = 3] = "GHE_DOTCOM";
|
||||
})(GitHubVariant = exports.GitHubVariant || (exports.GitHubVariant = {}));
|
||||
async function getGitHubVersion(apiDetails) {
|
||||
// We can avoid making an API request in the standard dotcom case
|
||||
@@ -283,6 +279,9 @@ async function getGitHubVersion(apiDetails) {
|
||||
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "GitHub AE") {
|
||||
return { type: GitHubVariant.GHAE };
|
||||
}
|
||||
if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") {
|
||||
return { type: GitHubVariant.GHE_DOTCOM };
|
||||
}
|
||||
const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER];
|
||||
return { type: GitHubVariant.GHES, version };
|
||||
}
|
||||
@@ -455,16 +454,33 @@ async function bundleDb(config, language, codeql, dbName) {
|
||||
return databaseBundlePath;
|
||||
}
|
||||
exports.bundleDb = bundleDb;
|
||||
async function delay(milliseconds) {
|
||||
// Immediately `unref` the timer such that it only prevents the process from exiting if the
|
||||
// surrounding promise is being awaited.
|
||||
return new Promise((resolve) => setTimeout(resolve, milliseconds).unref());
|
||||
/**
|
||||
* @param milliseconds time to delay
|
||||
* @param opts options
|
||||
* @param opts.allowProcessExit if true, the timer will not prevent the process from exiting
|
||||
*/
|
||||
async function delay(milliseconds, { allowProcessExit }) {
|
||||
return new Promise((resolve) => {
|
||||
const timer = setTimeout(resolve, milliseconds);
|
||||
if (allowProcessExit) {
|
||||
// Immediately `unref` the timer such that it only prevents the process from exiting if the
|
||||
// surrounding promise is being awaited.
|
||||
timer.unref();
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.delay = delay;
|
||||
function isGoodVersion(versionSpec) {
|
||||
return !BROKEN_VERSIONS.includes(versionSpec);
|
||||
}
|
||||
exports.isGoodVersion = isGoodVersion;
|
||||
/**
|
||||
* Checks whether the CodeQL CLI supports the `--expect-discarded-cache` command-line flag.
|
||||
*/
|
||||
async function supportExpectDiscardedCache(codeQL) {
|
||||
return codeQlVersionAbove(codeQL, "2.12.1");
|
||||
}
|
||||
exports.supportExpectDiscardedCache = supportExpectDiscardedCache;
|
||||
exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
|
||||
/**
|
||||
* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
||||
@@ -543,12 +559,12 @@ exports.isInTestMode = isInTestMode;
|
||||
* @returns true if the action should generate a code-scanning config file
|
||||
* that gets passed to the CLI.
|
||||
*/
|
||||
async function useCodeScanningConfigInCli(codeql, featureEnablement) {
|
||||
return await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
|
||||
async function useCodeScanningConfigInCli(codeql, features) {
|
||||
return await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
|
||||
}
|
||||
exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
|
||||
async function logCodeScanningConfigInCli(codeql, featureEnablement, logger) {
|
||||
if (await useCodeScanningConfigInCli(codeql, featureEnablement)) {
|
||||
async function logCodeScanningConfigInCli(codeql, features, logger) {
|
||||
if (await useCodeScanningConfigInCli(codeql, features)) {
|
||||
logger.info("Code Scanning configuration file being processed in the codeql CLI.");
|
||||
}
|
||||
else {
|
||||
@@ -636,7 +652,7 @@ async function withTimeout(timeoutMs, promise, onTimeout) {
|
||||
return result;
|
||||
};
|
||||
const timeoutTask = async () => {
|
||||
await delay(timeoutMs);
|
||||
await delay(timeoutMs, { allowProcessExit: true });
|
||||
if (!finished) {
|
||||
// Workaround: While the promise racing below will allow the main code
|
||||
// to continue, the process won't normally exit until the asynchronous
|
||||
@@ -659,7 +675,7 @@ exports.withTimeout = withTimeout;
|
||||
async function checkForTimeout() {
|
||||
if (hadTimeout === true) {
|
||||
core.info("A timeout occurred, force exiting the process after 30 seconds to prevent hanging.");
|
||||
await delay(30000);
|
||||
await delay(30000, { allowProcessExit: true });
|
||||
process.exit();
|
||||
}
|
||||
}
File diff suppressed because one or more lines are too long

7 lib/util.test.js (generated)
@@ -182,6 +182,13 @@ function mockGetMetaVersionHeader(versionHeader) {
apiURL: undefined,
});
t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
mockGetMetaVersionHeader("ghe.com");
const gheDotcom = await util.getGitHubVersion({
auth: "",
url: "https://foo.ghe.com",
apiURL: undefined,
});
t.deepEqual({ type: util.GitHubVariant.GHE_DOTCOM }, gheDotcom);
});
const ML_POWERED_JS_STATUS_TESTS = [
// If no packs are loaded, status is false.
File diff suppressed because one or more lines are too long

301 node_modules/.package-lock.json (generated, vendored)
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "2.2.3",
|
||||
"version": "2.2.8",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
@@ -142,16 +142,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@ava/typescript": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@ava/typescript/-/typescript-3.0.1.tgz",
|
||||
"integrity": "sha512-/JXIUuKsvkaneaiA9ckk3ksFTqvu0mDNlChASrTe2BnDsvMbhQdPWyqQjJ9WRJWVhhs5TWn1/0Pp1G6Rv8Syrw==",
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@ava/typescript/-/typescript-4.0.0.tgz",
|
||||
"integrity": "sha512-QFIPeqkEbdvn7Pob0wVeYpeZD0eXd8nDYdCl+knJVaIJrHdF2fXa58vFaig26cmYwnsEN0KRNTYJKbqW1B0lfg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"escape-string-regexp": "^5.0.0",
|
||||
"execa": "^5.1.1"
|
||||
"execa": "^7.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
|
||||
"node": ">=14.19 <15 || >=16.15 <17 || >=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@ava/typescript/node_modules/escape-string-regexp": {
|
||||
@@ -867,14 +867,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "5.48.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.48.2.tgz",
|
||||
"integrity": "sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==",
|
||||
"version": "5.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.56.0.tgz",
|
||||
"integrity": "sha512-sn1OZmBxUsgxMmR8a8U5QM/Wl+tyqlH//jTqCg8daTAmhAk26L2PFhcqPLlYBhYUJMZJK276qLXlHN3a83o2cg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "5.48.2",
|
||||
"@typescript-eslint/types": "5.48.2",
|
||||
"@typescript-eslint/typescript-estree": "5.48.2",
|
||||
"@typescript-eslint/scope-manager": "5.56.0",
|
||||
"@typescript-eslint/types": "5.56.0",
|
||||
"@typescript-eslint/typescript-estree": "5.56.0",
|
||||
"debug": "^4.3.4"
|
||||
},
|
||||
"engines": {
|
||||
@@ -893,6 +893,89 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "5.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.56.0.tgz",
|
||||
"integrity": "sha512-jGYKyt+iBakD0SA5Ww8vFqGpoV2asSjwt60Gl6YcO8ksQ8s2HlUEyHBMSa38bdLopYqGf7EYQMUIGdT/Luw+sw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "5.56.0",
|
||||
"@typescript-eslint/visitor-keys": "5.56.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": {
|
||||
"version": "5.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.56.0.tgz",
|
||||
"integrity": "sha512-JyAzbTJcIyhuUhogmiu+t79AkdnqgPUEsxMTMc/dCZczGMJQh1MK2wgrju++yMN6AWroVAy2jxyPcPr3SWCq5w==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "5.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.56.0.tgz",
|
||||
"integrity": "sha512-41CH/GncsLXOJi0jb74SnC7jVPWeVJ0pxQj8bOjH1h2O26jXN3YHKDT1ejkVz5YeTEQPeLCCRY0U2r68tfNOcg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "5.56.0",
|
||||
"@typescript-eslint/visitor-keys": "5.56.0",
|
||||
"debug": "^4.3.4",
|
||||
"globby": "^11.1.0",
|
||||
"is-glob": "^4.0.3",
|
||||
"semver": "^7.3.7",
|
||||
"tsutils": "^3.21.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"typescript": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "5.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.56.0.tgz",
|
||||
"integrity": "sha512-1mFdED7u5bZpX6Xxf5N9U2c18sb+8EvU3tyOIj6LQZ5OOvnmj8BVeNNP603OFPm5KkS1a7IvCIcwrdHXaEMG/Q==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "5.56.0",
|
||||
"eslint-visitor-keys": "^3.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser/node_modules/eslint-visitor-keys": {
|
||||
"version": "3.3.0",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz",
|
||||
"integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "5.48.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.48.2.tgz",
|
||||
@@ -3043,40 +3126,28 @@
|
||||
}
|
||||
},
|
||||
"node_modules/execa": {
|
||||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
|
||||
"integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/execa/-/execa-7.1.0.tgz",
|
||||
"integrity": "sha512-T6nIJO3LHxUZ6ahVRaxXz9WLEruXLqdcluA+UuTptXmLM7nDAn9lx9IfkxPyzEL21583qSt4RmL44pO71EHaJQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"cross-spawn": "^7.0.3",
|
||||
"get-stream": "^6.0.0",
|
||||
"human-signals": "^2.1.0",
|
||||
"is-stream": "^2.0.0",
|
||||
"get-stream": "^6.0.1",
|
||||
"human-signals": "^4.3.0",
|
||||
"is-stream": "^3.0.0",
|
||||
"merge-stream": "^2.0.0",
|
||||
"npm-run-path": "^4.0.1",
|
||||
"onetime": "^5.1.2",
|
||||
"signal-exit": "^3.0.3",
|
||||
"strip-final-newline": "^2.0.0"
|
||||
"npm-run-path": "^5.1.0",
|
||||
"onetime": "^6.0.0",
|
||||
"signal-exit": "^3.0.7",
|
||||
"strip-final-newline": "^3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
"node": "^14.18.0 || ^16.14.0 || >=18.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sindresorhus/execa?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/execa/node_modules/get-stream": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
|
||||
"integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/fast-deep-equal": {
|
||||
"version": "3.1.3",
|
||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
||||
@@ -3328,6 +3399,18 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/get-stream": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
|
||||
"integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/get-symbol-description": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
|
||||
@@ -3354,19 +3437,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/glob": {
|
||||
"version": "8.0.1",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz",
|
||||
"integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==",
|
||||
"version": "9.2.1",
|
||||
"resolved": "https://registry.npmjs.org/glob/-/glob-9.2.1.tgz",
|
||||
"integrity": "sha512-Pxxgq3W0HyA3XUvSXcFhRSs+43Jsx0ddxcFrbjxNGkL2Ak5BAUBxLqI5G6ADDeCHLfzzXFhe0b1yYcctGmytMA==",
|
||||
"dependencies": {
|
||||
"fs.realpath": "^1.0.0",
|
||||
"inflight": "^1.0.4",
|
||||
"inherits": "2",
|
||||
"minimatch": "^5.0.1",
|
||||
"once": "^1.3.0",
|
||||
"path-is-absolute": "^1.0.0"
|
||||
"minimatch": "^7.4.1",
|
||||
"minipass": "^4.2.4",
|
||||
"path-scurry": "^1.6.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
@@ -3392,14 +3473,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/glob/node_modules/minimatch": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
|
||||
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
|
||||
"version": "7.4.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz",
|
||||
"integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==",
|
||||
"dependencies": {
|
||||
"brace-expansion": "^2.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=10"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/globals": {
|
||||
@@ -3559,12 +3643,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/human-signals": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
|
||||
"integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
|
||||
"version": "4.3.0",
|
||||
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.0.tgz",
|
||||
"integrity": "sha512-zyzVyMjpGBX2+6cDVZeFPCdtOtdsxOeseRhB9tkQ6xXmGUNrcnBzdEKPy3VPNYz+4gy1oukVOXcrJCunSyc6QQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=10.17.0"
|
||||
"node": ">=14.18.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ignore": {
|
||||
@@ -3938,12 +4022,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/is-stream": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz",
|
||||
"integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
@@ -4340,18 +4424,6 @@
|
||||
"url": "https://github.com/sindresorhus/mem?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/mem/node_modules/mimic-fn": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz",
|
||||
"integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/merge-stream": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
|
||||
@@ -4398,11 +4470,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/mimic-fn": {
|
||||
"version": "2.1.0",
|
||||
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz",
|
||||
"integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/minimatch": {
|
||||
@@ -4425,6 +4501,14 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/minipass": {
|
||||
"version": "4.2.4",
|
||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
|
||||
"integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/ms": {
|
||||
"version": "2.1.2",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||
@@ -4531,15 +4615,30 @@
|
||||
}
|
||||
},
|
||||
"node_modules/npm-run-path": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
|
||||
"integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz",
|
||||
"integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"path-key": "^3.0.0"
|
||||
"path-key": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/npm-run-path/node_modules/path-key": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz",
|
||||
"integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/object-inspect": {
|
||||
@@ -4650,15 +4749,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/onetime": {
|
||||
"version": "5.1.2",
|
||||
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
|
||||
"integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz",
|
||||
"integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"mimic-fn": "^2.1.0"
|
||||
"mimic-fn": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
@@ -4839,6 +4938,29 @@
|
||||
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/path-scurry": {
|
||||
"version": "1.6.1",
|
||||
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.6.1.tgz",
|
||||
"integrity": "sha512-OW+5s+7cw6253Q4E+8qQ/u1fVvcJQCJo/VFD8pje+dbJCF1n5ZRMV2AEHbGp+5Q7jxQIYJxkHopnj6nzdGeZLA==",
|
||||
"dependencies": {
|
||||
"lru-cache": "^7.14.1",
|
||||
"minipass": "^4.0.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/path-scurry/node_modules/lru-cache": {
|
||||
"version": "7.18.3",
|
||||
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/path-to-regexp": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
|
||||
@@ -5472,12 +5594,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/strip-final-newline": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
|
||||
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz",
|
||||
"integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/strip-json-comments": {
|
||||
@@ -5791,16 +5916,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "4.9.4",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz",
|
||||
"integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==",
|
||||
"version": "5.0.2",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz",
|
||||
"integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.2.0"
|
||||
"node": ">=12.20"
|
||||
}
|
||||
},
|
||||
"node_modules/unbox-primitive": {
16
node_modules/@ava/typescript/README.md
generated
vendored
@@ -1,21 +1,11 @@
# @ava/typescript

Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA 4](https://avajs.dev).
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).

This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.

In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.

## For AVA 3 users

Use version 2:

```console
npm install --save-dev @ava/typescript@2
```

Note that v2 does not support ES modules. This requires v3 and AVA 4.

## Enabling TypeScript support

Add this package to your project:
@@ -47,7 +37,7 @@ You can enable compilation via the `compile` property. If `false`, AVA will assu

Output files are expected to have the `.js` extension.

AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs`, `*.ts`, `*.cts` and `*.mts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.

## ES Modules

@@ -75,6 +65,8 @@ You can configure AVA to recognize additional file extensions. To add (partial
}
```

If you use the [`allowJs` TypeScript option](https://www.typescriptlang.org/tsconfig/allowJs.html) you'll have to specify the `js`, `cjs` and `mjs` extensions for them to be rewritten.

See also AVA's [`extensions` option](https://github.com/avajs/ava/blob/master/docs/06-configuration.md#options).

† Note that the [*preserve* mode for JSX](https://www.typescriptlang.org/docs/handbook/jsx.html) is not (yet) supported.
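For context on the README excerpt above, an AVA project consuming this provider configures it under a `typescript` key. The sketch below is illustrative only and is not part of the vendored diff; the `src/` to `build/` mapping is an assumed project layout.

```js
// ava.config.js — illustrative sketch only; assumes sources in src/ compiled to build/.
export default {
  typescript: {
    // Tell AVA where compiled output for each TypeScript source tree lives.
    rewritePaths: {
      'src/': 'build/',
    },
    // false: the project is compiled separately (e.g. by `tsc`) before `npx ava` runs.
    compile: false,
  },
};
```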
40
node_modules/@ava/typescript/index.js
generated
vendored
@@ -2,7 +2,7 @@ import fs from 'node:fs';
import path from 'node:path';
import {pathToFileURL} from 'node:url';
import escapeStringRegexp from 'escape-string-regexp';
import execa from 'execa';
import {execa} from 'execa';

const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
@@ -83,7 +83,7 @@ export default function typescriptProvider({negotiateProtocol}) {
validate(config, configProperties);

const {
extensions = ['ts'],
extensions = ['ts', 'cts', 'mts'],
rewritePaths: relativeRewritePaths,
compile,
} = config;
@@ -118,7 +118,7 @@ export default function typescriptProvider({negotiateProtocol}) {
return rewritePaths.some(([from]) => filePath.startsWith(from));
},

resolveTestFile(testfile) {
resolveTestFile(testfile) { // Used under AVA 3.2 protocol by legacy watcher implementation.
if (!testFileExtension.test(testfile)) {
return testfile;
}
@@ -129,8 +129,14 @@ export default function typescriptProvider({negotiateProtocol}) {
}

const [from, to] = rewrite;
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, '.js');
let newExtension = '.js';
if (testfile.endsWith('.cts')) {
newExtension = '.cjs';
} else if (testfile.endsWith('.mts')) {
newExtension = '.mjs';
}

return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, newExtension);
},

updateGlobs({filePatterns, ignoredByWatcherPatterns}) {
@@ -142,7 +148,11 @@ export default function typescriptProvider({negotiateProtocol}) {
],
ignoredByWatcherPatterns: [
...ignoredByWatcherPatterns,
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`),
...Object.values(relativeRewritePaths).flatMap(to => [
`${to}**/*.js.map`,
`${to}**/*.cjs.map`,
`${to}**/*.mjs.map`,
]),
],
};
},
@@ -150,7 +160,7 @@ export default function typescriptProvider({negotiateProtocol}) {
},

worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
const useImport = extensionsToLoadAsModules.includes('js');
const importJs = extensionsToLoadAsModules.includes('js');
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);

return {
@@ -160,9 +170,19 @@ export default function typescriptProvider({negotiateProtocol}) {

async load(ref, {requireFn}) {
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten); // eslint-disable-line node/no-unsupported-features/es-syntax
let rewritten = `${to}${ref.slice(from.length)}`;
let useImport = true;
if (ref.endsWith('.cts')) {
rewritten = rewritten.replace(/\.cts$/, '.cjs');
useImport = false;
} else if (ref.endsWith('.mts')) {
rewritten = rewritten.replace(/\.mts$/, '.mjs');
} else {
rewritten = rewritten.replace(testFileExtension, '.js');
useImport = importJs;
}

return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten);
},
};
},
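The hunks above add `.cts`/`.mts` handling to both `resolveTestFile` and the worker's `load` hook. The snippet below is a standalone sketch, not part of the vendored file, of the extension mapping those hunks implement; the helper name is hypothetical and exists only to make the rule easy to see.

```js
// Standalone sketch of the rewrite rule added above (assumed helper, not from the diff):
// compiled output keeps the module flavour of its TypeScript source.
function rewriteExtension(testfile) {
  if (testfile.endsWith('.cts')) {
    return testfile.replace(/\.cts$/, '.cjs'); // CommonJS TypeScript -> .cjs, loaded via require()
  }
  if (testfile.endsWith('.mts')) {
    return testfile.replace(/\.mts$/, '.mjs'); // ES-module TypeScript -> .mjs, loaded via import()
  }
  return testfile.replace(/\.ts$/, '.js'); // plain .ts follows the project default
}

console.log(rewriteExtension('build/test.cts')); // build/test.cjs
console.log(rewriteExtension('build/test.mts')); // build/test.mjs
console.log(rewriteExtension('build/test.ts'));  // build/test.js
```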
19
node_modules/@ava/typescript/package.json
generated
vendored
@@ -1,9 +1,9 @@
{
"name": "@ava/typescript",
"version": "3.0.1",
"version": "4.0.0",
"description": "TypeScript provider for AVA",
"engines": {
"node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
"node": ">=14.19 <15 || >=16.15 <17 || >=18"
},
"files": [
"index.js"
@@ -24,14 +24,14 @@
},
"dependencies": {
"escape-string-regexp": "^5.0.0",
"execa": "^5.1.1"
"execa": "^7.1.0"
},
"devDependencies": {
"ava": "4.0.0-rc.1",
"c8": "^7.10.0",
"del": "^6.0.0",
"typescript": "^4.4.4",
"xo": "^0.46.3"
"ava": "^5.2.0",
"c8": "^7.13.0",
"del": "^7.0.0",
"typescript": "^4.9.5",
"xo": "^0.53.1"
},
"c8": {
"reporter": [
@@ -52,7 +52,8 @@
},
"xo": {
"ignores": [
"test/broken-fixtures"
"test/broken-fixtures",
"test/fixtures/**/compiled/**"
]
}
}
4
node_modules/@typescript-eslint/parser/_ts3.4/dist/index.d.ts
generated
vendored
@@ -1,4 +1,8 @@
export { parse, parseForESLint, ParserOptions } from './parser';
export { ParserServices, clearCaches, createProgram, } from '@typescript-eslint/typescript-estree';
export declare const version: string;
export declare const meta: {
name: string;
version: string;
};
//# sourceMappingURL=index.d.ts.map
4
node_modules/@typescript-eslint/parser/dist/index.d.ts
generated
vendored
@@ -1,4 +1,8 @@
export { parse, parseForESLint, ParserOptions } from './parser';
export { ParserServices, clearCaches, createProgram, } from '@typescript-eslint/typescript-estree';
export declare const version: string;
export declare const meta: {
name: string;
version: string;
};
//# sourceMappingURL=index.d.ts.map
2
node_modules/@typescript-eslint/parser/dist/index.d.ts.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,cAAc,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAChE,OAAO,EACL,cAAc,EACd,WAAW,EACX,aAAa,GACd,MAAM,sCAAsC,CAAC;AAI9C,eAAO,MAAM,OAAO,EAAE,MAA2C,CAAC"}
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,cAAc,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAChE,OAAO,EACL,cAAc,EACd,WAAW,EACX,aAAa,GACd,MAAM,sCAAsC,CAAC;AAI9C,eAAO,MAAM,OAAO,EAAE,MAA2C,CAAC;AAElE,eAAO,MAAM,IAAI;;;CAGhB,CAAC"}
6
node_modules/@typescript-eslint/parser/dist/index.js
generated
vendored
@@ -1,6 +1,6 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.version = exports.createProgram = exports.clearCaches = exports.parseForESLint = exports.parse = void 0;
exports.meta = exports.version = exports.createProgram = exports.clearCaches = exports.parseForESLint = exports.parse = void 0;
var parser_1 = require("./parser");
Object.defineProperty(exports, "parse", { enumerable: true, get: function () { return parser_1.parse; } });
Object.defineProperty(exports, "parseForESLint", { enumerable: true, get: function () { return parser_1.parseForESLint; } });
@@ -10,4 +10,8 @@ Object.defineProperty(exports, "createProgram", { enumerable: true, get: functio
// note - cannot migrate this to an import statement because it will make TSC copy the package.json to the dist folder
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access
exports.version = require('../package.json').version;
exports.meta = {
name: 'typescript-eslint/parser',
version: exports.version,
};
//# sourceMappingURL=index.js.map
2
node_modules/@typescript-eslint/parser/dist/index.js.map
generated
vendored
@@ -1 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,mCAAgE;AAAvD,+FAAA,KAAK,OAAA;AAAE,wGAAA,cAAc,OAAA;AAC9B,0EAI8C;AAF5C,gHAAA,WAAW,OAAA;AACX,kHAAA,aAAa,OAAA;AAGf,sHAAsH;AACtH,+GAA+G;AAClG,QAAA,OAAO,GAAW,OAAO,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC"}
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,mCAAgE;AAAvD,+FAAA,KAAK,OAAA;AAAE,wGAAA,cAAc,OAAA;AAC9B,0EAI8C;AAF5C,gHAAA,WAAW,OAAA;AACX,kHAAA,aAAa,OAAA;AAGf,sHAAsH;AACtH,+GAA+G;AAClG,QAAA,OAAO,GAAW,OAAO,CAAC,iBAAiB,CAAC,CAAC,OAAO,CAAC;AAErD,QAAA,IAAI,GAAG;IAClB,IAAI,EAAE,0BAA0B;IAChC,OAAO,EAAP,eAAO;CACR,CAAC"}
21
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 typescript-eslint and other contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
8
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/README.md
generated
vendored
Normal file
@@ -0,0 +1,8 @@
# `@typescript-eslint/scope-manager`

[](https://www.npmjs.com/package/@typescript-eslint/scope-manager)
[](https://www.npmjs.com/package/@typescript-eslint/scope-manager)

👉 See **https://typescript-eslint.io/architecture/scope-manager** for documentation on this package.

> See https://typescript-eslint.io for general documentation on typescript-eslint, the tooling that allows you to run ESLint and Prettier on TypeScript code.
4
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ID.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
declare function createIdGenerator(): () => number;
declare function resetIds(): void;
export { createIdGenerator, resetIds };
//# sourceMappingURL=ID.d.ts.map
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ID.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ID.d.ts","sourceRoot":"","sources":["../src/ID.ts"],"names":[],"mappings":"AAGA,iBAAS,iBAAiB,IAAI,MAAM,MAAM,CAUzC;AAED,iBAAS,QAAQ,IAAI,IAAI,CAExB;AAED,OAAO,EAAE,iBAAiB,EAAE,QAAQ,EAAE,CAAC"}
22
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ID.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.resetIds = exports.createIdGenerator = void 0;
const ID_CACHE = new Map();
let NEXT_KEY = 0;
function createIdGenerator() {
const key = (NEXT_KEY += 1);
ID_CACHE.set(key, 0);
return () => {
var _a;
const current = (_a = ID_CACHE.get(key)) !== null && _a !== void 0 ? _a : 0;
const next = current + 1;
ID_CACHE.set(key, next);
return next;
};
}
exports.createIdGenerator = createIdGenerator;
function resetIds() {
ID_CACHE.clear();
}
exports.resetIds = resetIds;
//# sourceMappingURL=ID.js.map
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ID.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ID.js","sourceRoot":"","sources":["../src/ID.ts"],"names":[],"mappings":";;;AAAA,MAAM,QAAQ,GAAG,IAAI,GAAG,EAAkB,CAAC;AAC3C,IAAI,QAAQ,GAAG,CAAC,CAAC;AAEjB,SAAS,iBAAiB;IACxB,MAAM,GAAG,GAAG,CAAC,QAAQ,IAAI,CAAC,CAAC,CAAC;IAC5B,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC;IAErB,OAAO,GAAW,EAAE;;QAClB,MAAM,OAAO,GAAG,MAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,mCAAI,CAAC,CAAC;QACvC,MAAM,IAAI,GAAG,OAAO,GAAG,CAAC,CAAC;QACzB,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC,CAAC;AACJ,CAAC;AAMQ,8CAAiB;AAJ1B,SAAS,QAAQ;IACf,QAAQ,CAAC,KAAK,EAAE,CAAC;AACnB,CAAC;AAE2B,4BAAQ"}
73
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ScopeManager.d.ts
generated
vendored
Normal file
@@ -0,0 +1,73 @@
|
||||
import type { TSESTree } from '@typescript-eslint/types';
|
||||
import type { Scope } from './scope';
|
||||
import { BlockScope, CatchScope, ClassScope, ConditionalTypeScope, ForScope, FunctionExpressionNameScope, FunctionScope, FunctionTypeScope, GlobalScope, MappedTypeScope, ModuleScope, SwitchScope, TSEnumScope, TSModuleScope, TypeScope, WithScope } from './scope';
|
||||
import { ClassFieldInitializerScope } from './scope/ClassFieldInitializerScope';
|
||||
import { ClassStaticBlockScope } from './scope/ClassStaticBlockScope';
|
||||
import type { Variable } from './variable';
|
||||
interface ScopeManagerOptions {
|
||||
globalReturn?: boolean;
|
||||
sourceType?: 'module' | 'script';
|
||||
impliedStrict?: boolean;
|
||||
ecmaVersion?: number;
|
||||
}
|
||||
declare class ScopeManager {
|
||||
#private;
|
||||
currentScope: Scope | null;
|
||||
readonly declaredVariables: WeakMap<TSESTree.Node, Variable[]>;
|
||||
/**
|
||||
* The root scope
|
||||
* @public
|
||||
*/
|
||||
globalScope: GlobalScope | null;
|
||||
readonly nodeToScope: WeakMap<TSESTree.Node, Scope[]>;
|
||||
/**
|
||||
* All scopes
|
||||
* @public
|
||||
*/
|
||||
readonly scopes: Scope[];
|
||||
get variables(): Variable[];
|
||||
constructor(options: ScopeManagerOptions);
|
||||
isGlobalReturn(): boolean;
|
||||
isModule(): boolean;
|
||||
isImpliedStrict(): boolean;
|
||||
isStrictModeSupported(): boolean;
|
||||
isES6(): boolean;
|
||||
/**
|
||||
* Get the variables that a given AST node defines. The gotten variables' `def[].node`/`def[].parent` property is the node.
|
||||
* If the node does not define any variable, this returns an empty array.
|
||||
* @param node An AST node to get their variables.
|
||||
* @public
|
||||
*/
|
||||
getDeclaredVariables(node: TSESTree.Node): Variable[];
|
||||
/**
|
||||
* Get the scope of a given AST node. The gotten scope's `block` property is the node.
|
||||
* This method never returns `function-expression-name` scope. If the node does not have their scope, this returns `null`.
|
||||
*
|
||||
* @param node An AST node to get their scope.
|
||||
* @param inner If the node has multiple scopes, this returns the outermost scope normally.
|
||||
* If `inner` is `true` then this returns the innermost scope.
|
||||
* @public
|
||||
*/
|
||||
acquire(node: TSESTree.Node, inner?: boolean): Scope | null;
|
||||
protected nestScope<T extends Scope>(scope: T): T;
|
||||
nestBlockScope(node: BlockScope['block']): BlockScope;
|
||||
nestCatchScope(node: CatchScope['block']): CatchScope;
|
||||
nestClassScope(node: ClassScope['block']): ClassScope;
|
||||
nestClassFieldInitializerScope(node: ClassFieldInitializerScope['block']): ClassFieldInitializerScope;
|
||||
nestClassStaticBlockScope(node: ClassStaticBlockScope['block']): ClassStaticBlockScope;
|
||||
nestConditionalTypeScope(node: ConditionalTypeScope['block']): ConditionalTypeScope;
|
||||
nestForScope(node: ForScope['block']): ForScope;
|
||||
nestFunctionExpressionNameScope(node: FunctionExpressionNameScope['block']): FunctionExpressionNameScope;
|
||||
nestFunctionScope(node: FunctionScope['block'], isMethodDefinition: boolean): FunctionScope;
|
||||
nestFunctionTypeScope(node: FunctionTypeScope['block']): FunctionTypeScope;
|
||||
nestGlobalScope(node: GlobalScope['block']): GlobalScope;
|
||||
nestMappedTypeScope(node: MappedTypeScope['block']): MappedTypeScope;
|
||||
nestModuleScope(node: ModuleScope['block']): ModuleScope;
|
||||
nestSwitchScope(node: SwitchScope['block']): SwitchScope;
|
||||
nestTSEnumScope(node: TSEnumScope['block']): TSEnumScope;
|
||||
nestTSModuleScope(node: TSModuleScope['block']): TSModuleScope;
|
||||
nestTypeScope(node: TypeScope['block']): TypeScope;
|
||||
nestWithScope(node: WithScope['block']): WithScope;
|
||||
}
|
||||
export { ScopeManager };
|
||||
//# sourceMappingURL=ScopeManager.d.ts.map
|
||||
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ScopeManager.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"ScopeManager.d.ts","sourceRoot":"","sources":["../src/ScopeManager.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,0BAA0B,CAAC;AAGzD,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AACrC,OAAO,EACL,UAAU,EACV,UAAU,EACV,UAAU,EACV,oBAAoB,EACpB,QAAQ,EACR,2BAA2B,EAC3B,aAAa,EACb,iBAAiB,EACjB,WAAW,EACX,eAAe,EACf,WAAW,EACX,WAAW,EACX,WAAW,EACX,aAAa,EACb,SAAS,EACT,SAAS,EACV,MAAM,SAAS,CAAC;AACjB,OAAO,EAAE,0BAA0B,EAAE,MAAM,oCAAoC,CAAC;AAChF,OAAO,EAAE,qBAAqB,EAAE,MAAM,+BAA+B,CAAC;AACtE,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AAE3C,UAAU,mBAAmB;IAC3B,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,UAAU,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;IACjC,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,cAAM,YAAY;;IACT,YAAY,EAAE,KAAK,GAAG,IAAI,CAAC;IAClC,SAAgB,iBAAiB,EAAE,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC;IACtE;;;OAGG;IACI,WAAW,EAAE,WAAW,GAAG,IAAI,CAAC;IACvC,SAAgB,WAAW,EAAE,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;IAE7D;;;OAGG;IACH,SAAgB,MAAM,EAAE,KAAK,EAAE,CAAC;IAEhC,IAAW,SAAS,IAAI,QAAQ,EAAE,CAQjC;gBAEW,OAAO,EAAE,mBAAmB;IASjC,cAAc,IAAI,OAAO;IAIzB,QAAQ,IAAI,OAAO;IAInB,eAAe,IAAI,OAAO;IAG1B,qBAAqB,IAAI,OAAO;IAIhC,KAAK,IAAI,OAAO;IAIvB;;;;;OAKG;IACI,oBAAoB,CAAC,IAAI,EAAE,QAAQ,CAAC,IAAI,GAAG,QAAQ,EAAE;IAI5D;;;;;;;;OAQG;IACI,OAAO,CAAC,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,KAAK,UAAQ,GAAG,KAAK,GAAG,IAAI;IAiChE,SAAS,CAAC,SAAS,CAAC,CAAC,SAAS,KAAK,EAAE,KAAK,EAAE,CAAC,GAAG,CAAC;IAU1C,cAAc,CAAC,IAAI,EAAE,UAAU,CAAC,OAAO,CAAC,GAAG,UAAU;IAKrD,cAAc,CAAC,IAAI,EAAE,UAAU,CAAC,OAAO,CAAC,GAAG,UAAU;IAKrD,cAAc,CAAC,IAAI,EAAE,UAAU,CAAC,OAAO,CAAC,GAAG,UAAU;IAKrD,8BAA8B,CACnC,IAAI,EAAE,0BAA0B,CAAC,OAAO,CAAC,GACxC,0BAA0B;IAOtB,yBAAyB,CAC9B,IAAI,EAAE,qBAAqB,CAAC,OAAO,CAAC,GACnC,qBAAqB;IAOjB,wBAAwB,CAC7B,IAAI,EAAE,oBAAoB,CAAC,OAAO,CAAC,GAClC,oBAAoB;IAOhB,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,OAAO,CAAC,GAAG,QAAQ;IAK/C,+BAA+B,CACpC,IAAI,EAAE,2BAA2B,CAAC,OAAO,CAAC,GACzC,2BAA2B;IAOvB,iBAAiB,CACtB,IAAI,EAAE,aAAa,CAAC,OAAO,CAAC,EAC5B,kBAAkB,EAAE,OAAO,GAC1B,aAAa;IAOT,qBAAqB,CAC1B,IAAI,EAAE,iBAAiB,CAAC,OAAO,CAAC,GAC/B,iBAAiB;IAKb,eAAe,CAAC,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW;IAIxD,mBAAmB,CAAC,IAAI,EAAE,eAAe,CAAC,OAAO,CAAC,GAAG,eAAe;IAKpE,eAAe,CAAC,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW;IAKxD,eAAe,CAAC,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW;IAKxD,eAAe,CAAC,IAAI,EAAE,WAAW,CAAC,OAAO,CAAC,GAAG,WAAW;IAKxD,iBAAiB,CAAC,IAAI,EAAE,aAAa,CAAC,OAAO,CAAC,GAAG,aAAa;IAK9D,aAAa,CAAC,IAAI,EAAE,SAAS,CAAC,OAAO,CAAC,GAAG,SAAS;IAKlD,aAAa,CAAC,IAAI,EAAE,SAAS,CAAC,OAAO,CAAC,GAAG,SAAS;CAI1D;AAED,OAAO,EAAE,YAAY,EAAE,CAAC"}
183
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ScopeManager.js
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
"use strict";
|
||||
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
|
||||
if (kind === "m") throw new TypeError("Private method is not writable");
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||||
};
|
||||
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
|
||||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||
};
|
||||
var _ScopeManager_options;
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ScopeManager = void 0;
|
||||
const assert_1 = require("./assert");
|
||||
const scope_1 = require("./scope");
|
||||
const ClassFieldInitializerScope_1 = require("./scope/ClassFieldInitializerScope");
|
||||
const ClassStaticBlockScope_1 = require("./scope/ClassStaticBlockScope");
|
||||
class ScopeManager {
|
||||
get variables() {
|
||||
const variables = new Set();
|
||||
function recurse(scope) {
|
||||
scope.variables.forEach(v => variables.add(v));
|
||||
scope.childScopes.forEach(recurse);
|
||||
}
|
||||
this.scopes.forEach(recurse);
|
||||
return Array.from(variables).sort((a, b) => a.$id - b.$id);
|
||||
}
|
||||
constructor(options) {
|
||||
_ScopeManager_options.set(this, void 0);
|
||||
this.scopes = [];
|
||||
this.globalScope = null;
|
||||
this.nodeToScope = new WeakMap();
|
||||
this.currentScope = null;
|
||||
__classPrivateFieldSet(this, _ScopeManager_options, options, "f");
|
||||
this.declaredVariables = new WeakMap();
|
||||
}
|
||||
isGlobalReturn() {
|
||||
return __classPrivateFieldGet(this, _ScopeManager_options, "f").globalReturn === true;
|
||||
}
|
||||
isModule() {
|
||||
return __classPrivateFieldGet(this, _ScopeManager_options, "f").sourceType === 'module';
|
||||
}
|
||||
isImpliedStrict() {
|
||||
return __classPrivateFieldGet(this, _ScopeManager_options, "f").impliedStrict === true;
|
||||
}
|
||||
isStrictModeSupported() {
|
||||
return __classPrivateFieldGet(this, _ScopeManager_options, "f").ecmaVersion != null && __classPrivateFieldGet(this, _ScopeManager_options, "f").ecmaVersion >= 5;
|
||||
}
|
||||
isES6() {
|
||||
return __classPrivateFieldGet(this, _ScopeManager_options, "f").ecmaVersion != null && __classPrivateFieldGet(this, _ScopeManager_options, "f").ecmaVersion >= 6;
|
||||
}
|
||||
/**
|
||||
* Get the variables that a given AST node defines. The gotten variables' `def[].node`/`def[].parent` property is the node.
|
||||
* If the node does not define any variable, this returns an empty array.
|
||||
* @param node An AST node to get their variables.
|
||||
* @public
|
||||
*/
|
||||
getDeclaredVariables(node) {
|
||||
var _a;
|
||||
return (_a = this.declaredVariables.get(node)) !== null && _a !== void 0 ? _a : [];
|
||||
}
|
||||
/**
|
||||
* Get the scope of a given AST node. The gotten scope's `block` property is the node.
|
||||
* This method never returns `function-expression-name` scope. If the node does not have their scope, this returns `null`.
|
||||
*
|
||||
* @param node An AST node to get their scope.
|
||||
* @param inner If the node has multiple scopes, this returns the outermost scope normally.
|
||||
* If `inner` is `true` then this returns the innermost scope.
|
||||
* @public
|
||||
*/
|
||||
acquire(node, inner = false) {
|
||||
var _a;
|
||||
function predicate(testScope) {
|
||||
if (testScope.type === 'function' && testScope.functionExpressionScope) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
const scopes = this.nodeToScope.get(node);
|
||||
if (!scopes || scopes.length === 0) {
|
||||
return null;
|
||||
}
|
||||
// Heuristic selection from all scopes.
|
||||
// If you would like to get all scopes, please use ScopeManager#acquireAll.
|
||||
if (scopes.length === 1) {
|
||||
return scopes[0];
|
||||
}
|
||||
if (inner) {
|
||||
for (let i = scopes.length - 1; i >= 0; --i) {
|
||||
const scope = scopes[i];
|
||||
if (predicate(scope)) {
|
||||
return scope;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
return (_a = scopes.find(predicate)) !== null && _a !== void 0 ? _a : null;
|
||||
}
|
||||
nestScope(scope) {
|
||||
if (scope instanceof scope_1.GlobalScope) {
|
||||
(0, assert_1.assert)(this.currentScope == null);
|
||||
this.globalScope = scope;
|
||||
}
|
||||
this.currentScope = scope;
|
||||
return scope;
|
||||
}
|
||||
nestBlockScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.BlockScope(this, this.currentScope, node));
|
||||
}
|
||||
nestCatchScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.CatchScope(this, this.currentScope, node));
|
||||
}
|
||||
nestClassScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.ClassScope(this, this.currentScope, node));
|
||||
}
|
||||
nestClassFieldInitializerScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new ClassFieldInitializerScope_1.ClassFieldInitializerScope(this, this.currentScope, node));
|
||||
}
|
||||
nestClassStaticBlockScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new ClassStaticBlockScope_1.ClassStaticBlockScope(this, this.currentScope, node));
|
||||
}
|
||||
nestConditionalTypeScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.ConditionalTypeScope(this, this.currentScope, node));
|
||||
}
|
||||
nestForScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.ForScope(this, this.currentScope, node));
|
||||
}
|
||||
nestFunctionExpressionNameScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.FunctionExpressionNameScope(this, this.currentScope, node));
|
||||
}
|
||||
nestFunctionScope(node, isMethodDefinition) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.FunctionScope(this, this.currentScope, node, isMethodDefinition));
|
||||
}
|
||||
nestFunctionTypeScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.FunctionTypeScope(this, this.currentScope, node));
|
||||
}
|
||||
nestGlobalScope(node) {
|
||||
return this.nestScope(new scope_1.GlobalScope(this, node));
|
||||
}
|
||||
nestMappedTypeScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.MappedTypeScope(this, this.currentScope, node));
|
||||
}
|
||||
nestModuleScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.ModuleScope(this, this.currentScope, node));
|
||||
}
|
||||
nestSwitchScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.SwitchScope(this, this.currentScope, node));
|
||||
}
|
||||
nestTSEnumScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.TSEnumScope(this, this.currentScope, node));
|
||||
}
|
||||
nestTSModuleScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.TSModuleScope(this, this.currentScope, node));
|
||||
}
|
||||
nestTypeScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.TypeScope(this, this.currentScope, node));
|
||||
}
|
||||
nestWithScope(node) {
|
||||
(0, assert_1.assert)(this.currentScope);
|
||||
return this.nestScope(new scope_1.WithScope(this, this.currentScope, node));
|
||||
}
|
||||
}
|
||||
exports.ScopeManager = ScopeManager;
|
||||
_ScopeManager_options = new WeakMap();
|
||||
//# sourceMappingURL=ScopeManager.js.map
|
||||
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/ScopeManager.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
62
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/analyze.d.ts
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
import type { EcmaVersion, Lib, TSESTree } from '@typescript-eslint/types';
|
||||
import type { ReferencerOptions } from './referencer';
|
||||
import { ScopeManager } from './ScopeManager';
|
||||
interface AnalyzeOptions {
|
||||
/**
|
||||
* Known visitor keys.
|
||||
*/
|
||||
childVisitorKeys?: ReferencerOptions['childVisitorKeys'];
|
||||
/**
|
||||
* Which ECMAScript version is considered.
|
||||
* Defaults to `2018`.
|
||||
* `'latest'` is converted to 1e8 at parser.
|
||||
*/
|
||||
ecmaVersion?: EcmaVersion | 1e8;
|
||||
/**
|
||||
* Whether the whole script is executed under node.js environment.
|
||||
* When enabled, the scope manager adds a function scope immediately following the global scope.
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
globalReturn?: boolean;
|
||||
/**
|
||||
* Implied strict mode (if ecmaVersion >= 5).
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
impliedStrict?: boolean;
|
||||
/**
|
||||
* The identifier that's used for JSX Element creation (after transpilation).
|
||||
* This should not be a member expression - just the root identifier (i.e. use "React" instead of "React.createElement").
|
||||
* Defaults to `"React"`.
|
||||
*/
|
||||
jsxPragma?: string | null;
|
||||
/**
|
||||
* The identifier that's used for JSX fragment elements (after transpilation).
|
||||
* If `null`, assumes transpilation will always use a member on `jsxFactory` (i.e. React.Fragment).
|
||||
* This should not be a member expression - just the root identifier (i.e. use "h" instead of "h.Fragment").
|
||||
* Defaults to `null`.
|
||||
*/
|
||||
jsxFragmentName?: string | null;
|
||||
/**
|
||||
* The lib used by the project.
|
||||
* This automatically defines a type variable for any types provided by the configured TS libs.
|
||||
* Defaults to the lib for the provided `ecmaVersion`.
|
||||
*
|
||||
* https://www.typescriptlang.org/tsconfig#lib
|
||||
*/
|
||||
lib?: Lib[];
|
||||
/**
|
||||
* The source type of the script.
|
||||
*/
|
||||
sourceType?: 'script' | 'module';
|
||||
/**
|
||||
* Emit design-type metadata for decorated declarations in source.
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
emitDecoratorMetadata?: boolean;
|
||||
}
|
||||
/**
|
||||
* Takes an AST and returns the analyzed scopes.
|
||||
*/
|
||||
declare function analyze(tree: TSESTree.Node, providedOptions?: AnalyzeOptions): ScopeManager;
|
||||
export { analyze, AnalyzeOptions };
|
||||
//# sourceMappingURL=analyze.d.ts.map
|
||||
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/analyze.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analyze.d.ts","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,WAAW,EAAE,GAAG,EAAE,QAAQ,EAAE,MAAM,0BAA0B,CAAC;AAI3E,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,cAAc,CAAC;AAEtD,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAM9C,UAAU,cAAc;IACtB;;OAEG;IACH,gBAAgB,CAAC,EAAE,iBAAiB,CAAC,kBAAkB,CAAC,CAAC;IAEzD;;;;OAIG;IACH,WAAW,CAAC,EAAE,WAAW,GAAG,GAAG,CAAC;IAEhC;;;;OAIG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IAExB;;;;OAIG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE1B;;;;;OAKG;IACH,eAAe,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAEhC;;;;;;OAMG;IACH,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC;IAEZ;;OAEG;IACH,UAAU,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;IAEjC;;;OAGG;IACH,qBAAqB,CAAC,EAAE,OAAO,CAAC;CACjC;AA6BD;;GAEG;AACH,iBAAS,OAAO,CACd,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,eAAe,CAAC,EAAE,cAAc,GAC/B,YAAY,CAgCd;AAED,OAAO,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC"}
58
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/analyze.js
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.analyze = void 0;
|
||||
const visitor_keys_1 = require("@typescript-eslint/visitor-keys");
|
||||
const lib_1 = require("./lib");
|
||||
const referencer_1 = require("./referencer");
|
||||
const ScopeManager_1 = require("./ScopeManager");
|
||||
const DEFAULT_OPTIONS = {
|
||||
childVisitorKeys: visitor_keys_1.visitorKeys,
|
||||
ecmaVersion: 2018,
|
||||
globalReturn: false,
|
||||
impliedStrict: false,
|
||||
jsxPragma: 'React',
|
||||
jsxFragmentName: null,
|
||||
lib: ['es2018'],
|
||||
sourceType: 'script',
|
||||
emitDecoratorMetadata: false,
|
||||
};
|
||||
/**
|
||||
* Convert ecmaVersion to lib.
|
||||
* `'latest'` is converted to 1e8 at parser.
|
||||
*/
|
||||
function mapEcmaVersion(version) {
|
||||
if (version == null || version === 3 || version === 5) {
|
||||
return 'es5';
|
||||
}
|
||||
const year = version > 2000 ? version : 2015 + (version - 6);
|
||||
const lib = `es${year}`;
|
||||
return lib in lib_1.lib ? lib : year > 2020 ? 'esnext' : 'es5';
|
||||
}
|
||||
/**
|
||||
* Takes an AST and returns the analyzed scopes.
|
||||
*/
|
||||
function analyze(tree, providedOptions) {
|
||||
var _a, _b, _c, _d, _e, _f, _g, _h;
|
||||
const ecmaVersion = (_a = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.ecmaVersion) !== null && _a !== void 0 ? _a : DEFAULT_OPTIONS.ecmaVersion;
|
||||
const options = {
|
||||
childVisitorKeys: (_b = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.childVisitorKeys) !== null && _b !== void 0 ? _b : DEFAULT_OPTIONS.childVisitorKeys,
|
||||
ecmaVersion,
|
||||
globalReturn: (_c = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.globalReturn) !== null && _c !== void 0 ? _c : DEFAULT_OPTIONS.globalReturn,
|
||||
impliedStrict: (_d = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.impliedStrict) !== null && _d !== void 0 ? _d : DEFAULT_OPTIONS.impliedStrict,
|
||||
jsxPragma: (providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.jsxPragma) === undefined
|
||||
? DEFAULT_OPTIONS.jsxPragma
|
||||
: providedOptions.jsxPragma,
|
||||
jsxFragmentName: (_e = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.jsxFragmentName) !== null && _e !== void 0 ? _e : DEFAULT_OPTIONS.jsxFragmentName,
|
||||
sourceType: (_f = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.sourceType) !== null && _f !== void 0 ? _f : DEFAULT_OPTIONS.sourceType,
|
||||
lib: (_g = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.lib) !== null && _g !== void 0 ? _g : [mapEcmaVersion(ecmaVersion)],
|
||||
emitDecoratorMetadata: (_h = providedOptions === null || providedOptions === void 0 ? void 0 : providedOptions.emitDecoratorMetadata) !== null && _h !== void 0 ? _h : DEFAULT_OPTIONS.emitDecoratorMetadata,
|
||||
};
|
||||
// ensure the option is lower cased
|
||||
options.lib = options.lib.map(l => l.toLowerCase());
|
||||
const scopeManager = new ScopeManager_1.ScopeManager(options);
|
||||
const referencer = new referencer_1.Referencer(options, scopeManager);
|
||||
referencer.visit(tree);
|
||||
return scopeManager;
|
||||
}
|
||||
exports.analyze = analyze;
|
||||
//# sourceMappingURL=analyze.js.map
|
||||
1
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/analyze.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"analyze.js","sourceRoot":"","sources":["../src/analyze.ts"],"names":[],"mappings":";;;AACA,kEAA8D;AAE9D,+BAA2C;AAE3C,6CAA0C;AAC1C,iDAA8C;AAoE9C,MAAM,eAAe,GAA6B;IAChD,gBAAgB,EAAE,0BAAW;IAC7B,WAAW,EAAE,IAAI;IACjB,YAAY,EAAE,KAAK;IACnB,aAAa,EAAE,KAAK;IACpB,SAAS,EAAE,OAAO;IAClB,eAAe,EAAE,IAAI;IACrB,GAAG,EAAE,CAAC,QAAQ,CAAC;IACf,UAAU,EAAE,QAAQ;IACpB,qBAAqB,EAAE,KAAK;CAC7B,CAAC;AAEF;;;GAGG;AACH,SAAS,cAAc,CAAC,OAAsC;IAC5D,IAAI,OAAO,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,OAAO,KAAK,CAAC,EAAE;QACrD,OAAO,KAAK,CAAC;KACd;IAED,MAAM,IAAI,GAAG,OAAO,GAAG,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAC7D,MAAM,GAAG,GAAG,KAAK,IAAI,EAAE,CAAC;IAExB,OAAO,GAAG,IAAI,SAAW,CAAC,CAAC,CAAE,GAAW,CAAC,CAAC,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC;AAC5E,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CACd,IAAmB,EACnB,eAAgC;;IAEhC,MAAM,WAAW,GACf,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,WAAW,mCAAI,eAAe,CAAC,WAAW,CAAC;IAC9D,MAAM,OAAO,GAA6B;QACxC,gBAAgB,EACd,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,gBAAgB,mCAAI,eAAe,CAAC,gBAAgB;QACvE,WAAW;QACX,YAAY,EAAE,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,YAAY,mCAAI,eAAe,CAAC,YAAY;QAC3E,aAAa,EACX,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,aAAa,mCAAI,eAAe,CAAC,aAAa;QACjE,SAAS,EACP,CAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,SAAS,MAAK,SAAS;YACtC,CAAC,CAAC,eAAe,CAAC,SAAS;YAC3B,CAAC,CAAC,eAAe,CAAC,SAAS;QAC/B,eAAe,EACb,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,eAAe,mCAAI,eAAe,CAAC,eAAe;QACrE,UAAU,EAAE,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,UAAU,mCAAI,eAAe,CAAC,UAAU;QACrE,GAAG,EAAE,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,GAAG,mCAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;QAC1D,qBAAqB,EACnB,MAAA,eAAe,aAAf,eAAe,uBAAf,eAAe,CAAE,qBAAqB,mCACtC,eAAe,CAAC,qBAAqB;KACxC,CAAC;IAEF,mCAAmC;IACnC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAS,CAAC,CAAC;IAE3D,MAAM,YAAY,GAAG,IAAI,2BAAY,CAAC,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,uBAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;IAEzD,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;IAEvB,OAAO,YAAY,CAAC;AACtB,CAAC;AAEQ,0BAAO"}
3
node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager/dist/assert.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
declare function assert(value: unknown, message?: string): asserts value;
export { assert };
//# sourceMappingURL=assert.d.ts.map
Some files were not shown because too many files have changed in this diff.