mirror of
https://github.com/github/codeql-action.git
synced 2025-12-12 18:50:12 +08:00
Compare commits
126 Commits
aeisenberg
...
v2.2.7
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
168b99b3c2 | ||
|
|
bc7318da91 | ||
|
|
433fe88bf3 | ||
|
|
c208575433 | ||
|
|
b8ea587211 | ||
|
|
65f42e3768 | ||
|
|
d9ceda3823 | ||
|
|
19f00dc212 | ||
|
|
ec298233c1 | ||
|
|
a92a14621b | ||
|
|
204eadab9d | ||
|
|
0e50a19ce3 | ||
|
|
4775eab92e | ||
|
|
16964e90ba | ||
|
|
74cbab4958 | ||
|
|
b36480d849 | ||
|
|
53f80edaf6 | ||
|
|
b31d983f22 | ||
|
|
485cc11a24 | ||
|
|
fc1366f6ec | ||
|
|
e12a2ecd45 | ||
|
|
d47d4c8047 | ||
|
|
f13b180fb8 | ||
|
|
a3cf96418e | ||
|
|
0c27d0da4a | ||
|
|
4b1f530308 | ||
|
|
d98eadb536 | ||
|
|
e4b846c482 | ||
|
|
c310f094dd | ||
|
|
4366485427 | ||
|
|
8340258886 | ||
|
|
6ef6e50882 | ||
|
|
eb40427b00 | ||
|
|
7806af3040 | ||
|
|
abf1cea835 | ||
|
|
e5ade42937 | ||
|
|
6f079be771 | ||
|
|
100bd7bbef | ||
|
|
a6d3a44519 | ||
|
|
5e4af3a25d | ||
|
|
e812e63bb6 | ||
|
|
a589d4087e | ||
|
|
98d24e5629 | ||
|
|
903be79953 | ||
|
|
18ff14b615 | ||
|
|
36a249f5ae | ||
|
|
041757fc59 | ||
|
|
8f19113f88 | ||
|
|
cf1855ae37 | ||
|
|
652709d1b9 | ||
|
|
32dc499307 | ||
|
|
b742728ac2 | ||
|
|
237a258d2b | ||
|
|
5972e6d72e | ||
|
|
164027e682 | ||
|
|
736263f8fe | ||
|
|
3dde1f3512 | ||
|
|
d7d7567b0e | ||
|
|
0e4e857bab | ||
|
|
08d1f21d4f | ||
|
|
f3bd25eefa | ||
|
|
41f1810e52 | ||
|
|
d87ad69338 | ||
|
|
8242edb8ed | ||
|
|
3095a09bb0 | ||
|
|
e00cd12e3e | ||
|
|
a25536bc80 | ||
|
|
a2487fb969 | ||
|
|
e187d074ed | ||
|
|
89c5165e5a | ||
|
|
ba216f7d34 | ||
|
|
68f4f0d3bb | ||
|
|
12d9a244fa | ||
|
|
17573ee1cc | ||
|
|
b6975b4b1a | ||
|
|
b011dbdedf | ||
|
|
40babc141f | ||
|
|
5492b7d104 | ||
|
|
3c81243bb1 | ||
|
|
e2f72f11e4 | ||
|
|
7ba5ed7eed | ||
|
|
21f3020df6 | ||
|
|
b872c5adfd | ||
|
|
8775e86802 | ||
|
|
a2ad80b966 | ||
|
|
c4e22e9fce | ||
|
|
db534af2ae | ||
|
|
bbe8d375fd | ||
|
|
4369dda4ae | ||
|
|
4f08c2cf20 | ||
|
|
81644f35ff | ||
|
|
9ab6aa64a0 | ||
|
|
256973e279 | ||
|
|
59b25b480f | ||
|
|
39d8d7e78f | ||
|
|
39c954c513 | ||
|
|
8af83634ca | ||
|
|
927de483f0 | ||
|
|
e4c0a1b24d | ||
|
|
d3962273b3 | ||
|
|
c3cb270725 | ||
|
|
2b674f7ab9 | ||
|
|
6d47a7c8b1 | ||
|
|
c6ff11c1c4 | ||
|
|
d3f2b2e6d2 | ||
|
|
d49282c3b5 | ||
|
|
c5c475188a | ||
|
|
f140af5e28 | ||
|
|
e0fc1c91b2 | ||
|
|
b95df0b2e7 | ||
|
|
2fed02cbe2 | ||
|
|
0b2a40fa4a | ||
|
|
395ec04a8b | ||
|
|
e1070bd101 | ||
|
|
3ebbd71c74 | ||
|
|
2ae6e13cc3 | ||
|
|
4664f39699 | ||
|
|
b2e16761f3 | ||
|
|
592a896a53 | ||
|
|
4a6b5a54c2 | ||
|
|
436dbd9100 | ||
|
|
d966969093 | ||
|
|
f6d03f448d | ||
|
|
43f1a6c701 | ||
|
|
75ae065ae6 | ||
|
|
0a9e9db27f |
@@ -33,6 +33,12 @@
|
|||||||
"alphabetize": {"order": "asc"},
|
"alphabetize": {"order": "asc"},
|
||||||
"newlines-between": "always"
|
"newlines-between": "always"
|
||||||
}],
|
}],
|
||||||
|
"max-len": ["error", {
|
||||||
|
"code": 120,
|
||||||
|
"ignoreUrls": true,
|
||||||
|
"ignoreStrings": true,
|
||||||
|
"ignoreTemplateLiterals": true
|
||||||
|
}],
|
||||||
"no-async-foreach/no-async-foreach": "error",
|
"no-async-foreach/no-async-foreach": "error",
|
||||||
"no-console": "off",
|
"no-console": "off",
|
||||||
"no-sequences": "error",
|
"no-sequences": "error",
|
||||||
|
|||||||
1
.github/codeql/codeql-config.yml
vendored
1
.github/codeql/codeql-config.yml
vendored
@@ -7,6 +7,7 @@ queries:
|
|||||||
# we include both even though one is a superset of the
|
# we include both even though one is a superset of the
|
||||||
# other, because we're testing the parsing logic and
|
# other, because we're testing the parsing logic and
|
||||||
# that the suites exist in the codeql bundle.
|
# that the suites exist in the codeql bundle.
|
||||||
|
- uses: security-experimental
|
||||||
- uses: security-extended
|
- uses: security-extended
|
||||||
- uses: security-and-quality
|
- uses: security-and-quality
|
||||||
paths-ignore:
|
paths-ignore:
|
||||||
|
|||||||
4
.github/dependabot.yml
vendored
4
.github/dependabot.yml
vendored
@@ -15,3 +15,7 @@ updates:
|
|||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: weekly
|
interval: weekly
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: "/.github/setup-swift/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
|
||||||
|
schedule:
|
||||||
|
interval: weekly
|
||||||
|
|||||||
2
.github/setup-swift/action.yml
vendored
2
.github/setup-swift/action.yml
vendored
@@ -26,7 +26,7 @@ runs:
|
|||||||
VERSION="5.7.0"
|
VERSION="5.7.0"
|
||||||
fi
|
fi
|
||||||
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
|
echo "version=$VERSION" | tee -a $GITHUB_OUTPUT
|
||||||
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
- uses: swift-actions/setup-swift@da0e3e04b5e3e15dbc3861bd835ad9f0afe56296 # Please update the corresponding SHA in the CLI's CodeQL Action Integration Test.
|
||||||
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
|
if: "(runner.os != 'Windows') && (matrix.version == 'cached' || matrix.version == 'latest' || matrix.version == 'nightly-latest')"
|
||||||
with:
|
with:
|
||||||
swift-version: "${{steps.get_swift_version.outputs.version}}"
|
swift-version: "${{steps.get_swift_version.outputs.version}}"
|
||||||
|
|||||||
2
.github/update-release-branch.py
vendored
2
.github/update-release-branch.py
vendored
@@ -161,7 +161,7 @@ def update_changelog(version):
|
|||||||
else:
|
else:
|
||||||
content = EMPTY_CHANGELOG
|
content = EMPTY_CHANGELOG
|
||||||
|
|
||||||
newContent = content.replace('[UNRELEASED]', f'${version} - {get_today_string()}', 1)
|
newContent = content.replace('[UNRELEASED]', f'{version} - {get_today_string()}', 1)
|
||||||
|
|
||||||
with open('CHANGELOG.md', 'w') as f:
|
with open('CHANGELOG.md', 'w') as f:
|
||||||
f.write(newContent)
|
f.write(newContent)
|
||||||
|
|||||||
1
.github/workflows/__analyze-ref-input.yml
generated
vendored
1
.github/workflows/__analyze-ref-input.yml
generated
vendored
@@ -88,6 +88,7 @@ jobs:
|
|||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
|
upload-database: false
|
||||||
ref: refs/heads/main
|
ref: refs/heads/main
|
||||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||||
env:
|
env:
|
||||||
|
|||||||
2
.github/workflows/__autobuild-action.yml
generated
vendored
2
.github/workflows/__autobuild-action.yml
generated
vendored
@@ -56,6 +56,8 @@ jobs:
|
|||||||
CORECLR_PROFILER: ''
|
CORECLR_PROFILER: ''
|
||||||
CORECLR_PROFILER_PATH_64: ''
|
CORECLR_PROFILER_PATH_64: ''
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- name: Check database
|
- name: Check database
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
90
.github/workflows/__config-export.yml
generated
vendored
Normal file
90
.github/workflows/__config-export.yml
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
# Warning: This file is generated automatically, and should not be modified.
|
||||||
|
# Instead, please modify the template in the pr-checks directory and run:
|
||||||
|
# pip install ruamel.yaml && python3 sync.py
|
||||||
|
# to regenerate this file.
|
||||||
|
|
||||||
|
name: PR Check - Config export
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
GO111MODULE: auto
|
||||||
|
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- releases/v2
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- synchronize
|
||||||
|
- reopened
|
||||||
|
- ready_for_review
|
||||||
|
workflow_dispatch: {}
|
||||||
|
jobs:
|
||||||
|
config-export:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: latest
|
||||||
|
- os: windows-latest
|
||||||
|
version: latest
|
||||||
|
name: Config export
|
||||||
|
timeout-minutes: 45
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- name: Check out repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Prepare test
|
||||||
|
id: prepare-test
|
||||||
|
uses: ./.github/prepare-test
|
||||||
|
with:
|
||||||
|
version: ${{ matrix.version }}
|
||||||
|
- uses: ./../action/init
|
||||||
|
with:
|
||||||
|
languages: javascript
|
||||||
|
queries: security-extended
|
||||||
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
- name: Upload SARIF
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
|
||||||
|
path: ${{ runner.temp }}/results/javascript.sarif
|
||||||
|
retention-days: 7
|
||||||
|
- name: Check config properties appear in SARIF
|
||||||
|
uses: actions/github-script@v6
|
||||||
|
env:
|
||||||
|
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const sarif = JSON.parse(fs.readFileSync(process.env['SARIF_PATH'], 'utf8'));
|
||||||
|
const run = sarif.runs[0];
|
||||||
|
const configSummary = run.properties.codeqlConfigSummary;
|
||||||
|
|
||||||
|
if (configSummary === undefined) {
|
||||||
|
core.setFailed('`codeqlConfigSummary` property not found in the SARIF run property bag.');
|
||||||
|
}
|
||||||
|
if (configSummary.disableDefaultQueries !== false) {
|
||||||
|
core.setFailed('`disableDefaultQueries` property incorrect: expected false, got ' +
|
||||||
|
`${JSON.stringify(configSummary.disableDefaultQueries)}.`);
|
||||||
|
}
|
||||||
|
const expectedQueries = [{ type: 'builtinSuite', uses: 'security-extended' }];
|
||||||
|
// Use JSON.stringify to deep-equal the arrays.
|
||||||
|
if (JSON.stringify(configSummary.queries) !== JSON.stringify(expectedQueries)) {
|
||||||
|
core.setFailed(`\`queries\` property incorrect: expected ${JSON.stringify(expectedQueries)}, got ` +
|
||||||
|
`${JSON.stringify(configSummary.queries)}.`);
|
||||||
|
}
|
||||||
|
core.info('Finished config export tests.');
|
||||||
|
env:
|
||||||
|
CODEQL_ACTION_EXPORT_CODE_SCANNING_CONFIG: true
|
||||||
|
CODEQL_PASS_CONFIG_TO_CLI: true
|
||||||
|
CODEQL_ACTION_TEST_MODE: true
|
||||||
2
.github/workflows/__go-custom-queries.yml
generated
vendored
2
.github/workflows/__go-custom-queries.yml
generated
vendored
@@ -86,6 +86,8 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
env:
|
env:
|
||||||
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
|
DOTNET_GENERATE_ASPNET_CERTIFICATE: 'false'
|
||||||
CODEQL_ACTION_TEST_MODE: true
|
CODEQL_ACTION_TEST_MODE: true
|
||||||
|
|||||||
2
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
2
.github/workflows/__go-tracing-autobuilder.yml
generated
vendored
@@ -71,6 +71,8 @@ jobs:
|
|||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- uses: ./../action/autobuild
|
- uses: ./../action/autobuild
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- shell: bash
|
- shell: bash
|
||||||
run: |
|
run: |
|
||||||
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then
|
if [[ "${CODEQL_ACTION_DID_AUTOBUILD_GOLANG}" != true ]]; then
|
||||||
|
|||||||
2
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
2
.github/workflows/__go-tracing-custom-build-steps.yml
generated
vendored
@@ -73,6 +73,8 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: go build main.go
|
run: go build main.go
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- shell: bash
|
- shell: bash
|
||||||
run: |
|
run: |
|
||||||
# Once we start running Bash 4.2 in all environments, we can replace the
|
# Once we start running Bash 4.2 in all environments, we can replace the
|
||||||
|
|||||||
2
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
2
.github/workflows/__go-tracing-legacy-workflow.yml
generated
vendored
@@ -70,6 +70,8 @@ jobs:
|
|||||||
languages: go
|
languages: go
|
||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- shell: bash
|
- shell: bash
|
||||||
run: |
|
run: |
|
||||||
cd "$RUNNER_TEMP/codeql_databases"
|
cd "$RUNNER_TEMP/codeql_databases"
|
||||||
|
|||||||
42
.github/workflows/__init-with-registries.yml
generated
vendored
42
.github/workflows/__init-with-registries.yml
generated
vendored
@@ -25,6 +25,18 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: cached
|
||||||
|
- os: macos-latest
|
||||||
|
version: cached
|
||||||
|
- os: windows-latest
|
||||||
|
version: cached
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
- os: macos-latest
|
||||||
|
version: latest
|
||||||
|
- os: windows-latest
|
||||||
|
version: latest
|
||||||
- os: ubuntu-latest
|
- os: ubuntu-latest
|
||||||
version: nightly-latest
|
version: nightly-latest
|
||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
@@ -75,5 +87,35 @@ jobs:
|
|||||||
echo "::error $CODEQL_PACK1 pack was not installed."
|
echo "::error $CODEQL_PACK1 pack was not installed."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
- name: Verify qlconfig.yml file was created
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
|
||||||
|
echo "Expected qlconfig.yml file to be created at $QLCONFIG_PATH"
|
||||||
|
if [[ -f $QLCONFIG_PATH ]]
|
||||||
|
then
|
||||||
|
echo "qlconfig.yml file was created."
|
||||||
|
else
|
||||||
|
echo "::error qlconfig.yml file was not created."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Verify contents of qlconfig.yml
|
||||||
|
# yq is not available on windows
|
||||||
|
if: runner.os != 'Windows'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
QLCONFIG_PATH=$RUNNER_TEMP/qlconfig.yml
|
||||||
|
cat $QLCONFIG_PATH | yq -e '.registries[] | select(.url == "https://ghcr.io/v2/") | select(.packages == "*/*")'
|
||||||
|
if [[ $? -eq 0 ]]
|
||||||
|
then
|
||||||
|
echo "Registry was added to qlconfig.yml file."
|
||||||
|
else
|
||||||
|
echo "::error Registry was not added to qlconfig.yml file."
|
||||||
|
echo "Contents of qlconfig.yml file:"
|
||||||
|
cat $QLCONFIG_PATH
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
env:
|
env:
|
||||||
CODEQL_ACTION_TEST_MODE: true
|
CODEQL_ACTION_TEST_MODE: true
|
||||||
|
|||||||
1
.github/workflows/__javascript-source-root.yml
generated
vendored
1
.github/workflows/__javascript-source-root.yml
generated
vendored
@@ -54,6 +54,7 @@ jobs:
|
|||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
|
upload-database: false
|
||||||
skip-queries: true
|
skip-queries: true
|
||||||
upload: false
|
upload: false
|
||||||
- name: Assert database exists
|
- name: Assert database exists
|
||||||
|
|||||||
2
.github/workflows/__multi-language-autodetect.yml
generated
vendored
2
.github/workflows/__multi-language-autodetect.yml
generated
vendored
@@ -81,6 +81,8 @@ jobs:
|
|||||||
|
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- name: Check language autodetect for all languages excluding Ruby, Swift
|
- name: Check language autodetect for all languages excluding Ruby, Swift
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
1
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
1
.github/workflows/__packaging-codescanning-config-inputs-js.yml
generated
vendored
@@ -66,6 +66,7 @@ jobs:
|
|||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
output: ${{ runner.temp }}/results
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- name: Check results
|
- name: Check results
|
||||||
uses: ./../action/.github/check-sarif
|
uses: ./../action/.github/check-sarif
|
||||||
|
|||||||
1
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
1
.github/workflows/__packaging-config-inputs-js.yml
generated
vendored
@@ -66,6 +66,7 @@ jobs:
|
|||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
output: ${{ runner.temp }}/results
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- name: Check results
|
- name: Check results
|
||||||
uses: ./../action/.github/check-sarif
|
uses: ./../action/.github/check-sarif
|
||||||
|
|||||||
1
.github/workflows/__packaging-config-js.yml
generated
vendored
1
.github/workflows/__packaging-config-js.yml
generated
vendored
@@ -65,6 +65,7 @@ jobs:
|
|||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
output: ${{ runner.temp }}/results
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- name: Check results
|
- name: Check results
|
||||||
uses: ./../action/.github/check-sarif
|
uses: ./../action/.github/check-sarif
|
||||||
|
|||||||
2
.github/workflows/__ruby.yml
generated
vendored
2
.github/workflows/__ruby.yml
generated
vendored
@@ -54,6 +54,8 @@ jobs:
|
|||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- name: Check database
|
- name: Check database
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
1
.github/workflows/__split-workflow.yml
generated
vendored
1
.github/workflows/__split-workflow.yml
generated
vendored
@@ -61,6 +61,7 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
skip-queries: true
|
skip-queries: true
|
||||||
output: ${{ runner.temp }}/results
|
output: ${{ runner.temp }}/results
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- name: Assert No Results
|
- name: Assert No Results
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|||||||
2
.github/workflows/__swift-autobuild.yml
generated
vendored
2
.github/workflows/__swift-autobuild.yml
generated
vendored
@@ -57,6 +57,8 @@ jobs:
|
|||||||
timeout-minutes: 10
|
timeout-minutes: 10
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- name: Check database
|
- name: Check database
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
2
.github/workflows/__swift-custom-build.yml
generated
vendored
2
.github/workflows/__swift-custom-build.yml
generated
vendored
@@ -64,6 +64,8 @@ jobs:
|
|||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- name: Check database
|
- name: Check database
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
2
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
2
.github/workflows/__test-autobuild-working-dir.yml
generated
vendored
@@ -53,6 +53,8 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
working-directory: autobuild-dir
|
working-directory: autobuild-dir
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- name: Check database
|
- name: Check database
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
2
.github/workflows/__test-local-codeql.yml
generated
vendored
2
.github/workflows/__test-local-codeql.yml
generated
vendored
@@ -51,5 +51,7 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
env:
|
env:
|
||||||
CODEQL_ACTION_TEST_MODE: true
|
CODEQL_ACTION_TEST_MODE: true
|
||||||
|
|||||||
2
.github/workflows/__test-proxy.yml
generated
vendored
2
.github/workflows/__test-proxy.yml
generated
vendored
@@ -43,6 +43,8 @@ jobs:
|
|||||||
languages: javascript
|
languages: javascript
|
||||||
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
tools: ${{ steps.prepare-test.outputs.tools-url }}
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
env:
|
env:
|
||||||
https_proxy: http://squid-proxy:3128
|
https_proxy: http://squid-proxy:3128
|
||||||
CODEQL_ACTION_TEST_MODE: true
|
CODEQL_ACTION_TEST_MODE: true
|
||||||
|
|||||||
2
.github/workflows/__unset-environment.yml
generated
vendored
2
.github/workflows/__unset-environment.yml
generated
vendored
@@ -65,6 +65,8 @@ jobs:
|
|||||||
./build.sh
|
./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
id: analysis
|
id: analysis
|
||||||
|
with:
|
||||||
|
upload-database: false
|
||||||
- shell: bash
|
- shell: bash
|
||||||
run: |
|
run: |
|
||||||
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
|
CPP_DB="${{ fromJson(steps.analysis.outputs.db-locations).cpp }}"
|
||||||
|
|||||||
1
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
1
.github/workflows/__upload-ref-sha-input.yml
generated
vendored
@@ -88,6 +88,7 @@ jobs:
|
|||||||
run: ./build.sh
|
run: ./build.sh
|
||||||
- uses: ./../action/analyze
|
- uses: ./../action/analyze
|
||||||
with:
|
with:
|
||||||
|
upload-database: false
|
||||||
ref: refs/heads/main
|
ref: refs/heads/main
|
||||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||||
upload: false
|
upload: false
|
||||||
|
|||||||
1
.github/workflows/__with-checkout-path.yml
generated
vendored
1
.github/workflows/__with-checkout-path.yml
generated
vendored
@@ -104,6 +104,7 @@ jobs:
|
|||||||
ref: v1.1.0
|
ref: v1.1.0
|
||||||
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
sha: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
|
||||||
upload: false
|
upload: false
|
||||||
|
upload-database: false
|
||||||
|
|
||||||
- uses: ./../action/upload-sarif
|
- uses: ./../action/upload-sarif
|
||||||
with:
|
with:
|
||||||
|
|||||||
10
.github/workflows/script/check-node-modules.sh
vendored
10
.github/workflows/script/check-node-modules.sh
vendored
@@ -7,13 +7,9 @@ if [ ! -z "$(git status --porcelain)" ]; then
|
|||||||
>&2 echo "Failed: Repo should be clean before testing!"
|
>&2 echo "Failed: Repo should be clean before testing!"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
# When updating this, make sure to update the npm version in
|
|
||||||
# `.github/workflows/update-dependencies.yml` too.
|
"$(dirname "$0")/update-node-modules.sh" check-only
|
||||||
sudo npm install --force -g npm@9.2.0
|
|
||||||
# Reinstall modules and then clean to remove absolute paths
|
|
||||||
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
|
||||||
npm ci
|
|
||||||
npm run removeNPMAbsolutePaths
|
|
||||||
# Check that repo is still clean
|
# Check that repo is still clean
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
# If we get a fail here then the PR needs attention
|
# If we get a fail here then the PR needs attention
|
||||||
|
|||||||
18
.github/workflows/script/update-node-modules.sh
vendored
Executable file
18
.github/workflows/script/update-node-modules.sh
vendored
Executable file
@@ -0,0 +1,18 @@
|
|||||||
|
if [ "$1" != "update" && "$1" != "check-only" ]; then
|
||||||
|
>&2 echo "Failed: Invalid argument. Must be 'update' or 'check-only'"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
sudo npm install --force -g npm@9.2.0
|
||||||
|
|
||||||
|
# clean the npm cache to ensure we don't have any files owned by root
|
||||||
|
sudo npm cache clean --force
|
||||||
|
|
||||||
|
if [ "$1" = "update" ]; then
|
||||||
|
npm install
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Reinstall modules and then clean to remove absolute paths
|
||||||
|
# Use 'npm ci' instead of 'npm install' as this is intended to be reproducible
|
||||||
|
npm ci
|
||||||
|
npm run removeNPMAbsolutePaths
|
||||||
7
.github/workflows/update-dependencies.yml
vendored
7
.github/workflows/update-dependencies.yml
vendored
@@ -27,12 +27,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
git fetch origin "$BRANCH" --depth=1
|
git fetch origin "$BRANCH" --depth=1
|
||||||
git checkout "origin/$BRANCH"
|
git checkout "origin/$BRANCH"
|
||||||
# When updating this, make sure to update the npm version in
|
.github/workflows/script/update-node-modules.sh update
|
||||||
# `.github/workflows/script/check-node-modules.sh` too.
|
|
||||||
sudo npm install --force -g npm@9.2.0
|
|
||||||
npm install
|
|
||||||
npm ci
|
|
||||||
npm run removeNPMAbsolutePaths
|
|
||||||
if [ ! -z "$(git status --porcelain)" ]; then
|
if [ ! -z "$(git status --porcelain)" ]; then
|
||||||
git config --global user.email "github-actions@github.com"
|
git config --global user.email "github-actions@github.com"
|
||||||
git config --global user.name "github-actions[bot]"
|
git config --global user.name "github-actions[bot]"
|
||||||
|
|||||||
30
CHANGELOG.md
30
CHANGELOG.md
@@ -1,6 +1,34 @@
|
|||||||
# CodeQL Action Changelog
|
# CodeQL Action Changelog
|
||||||
|
|
||||||
## [UNRELEASED]
|
## 2.2.7 - 15 Mar 2023
|
||||||
|
|
||||||
|
No user facing changes.
|
||||||
|
|
||||||
|
## 2.2.6 - 10 Mar 2023
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.12.4.
|
||||||
|
|
||||||
|
## 2.2.5 - 24 Feb 2023
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.12.3. [#1543](https://github.com/github/codeql-action/pull/1543)
|
||||||
|
|
||||||
|
## 2.2.4 - 10 Feb 2023
|
||||||
|
|
||||||
|
No user facing changes.
|
||||||
|
|
||||||
|
## 2.2.3 - 08 Feb 2023
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.12.2. [#1518](https://github.com/github/codeql-action/pull/1518)
|
||||||
|
|
||||||
|
## 2.2.2 - 06 Feb 2023
|
||||||
|
|
||||||
|
- Fix an issue where customers using the CodeQL Action with the [CodeQL Action sync tool](https://docs.github.com/en/enterprise-server@3.7/admin/code-security/managing-github-advanced-security-for-your-enterprise/configuring-code-scanning-for-your-appliance#configuring-codeql-analysis-on-a-server-without-internet-access) would not be able to obtain the CodeQL tools. [#1517](https://github.com/github/codeql-action/pull/1517)
|
||||||
|
|
||||||
|
## 2.2.1 - 27 Jan 2023
|
||||||
|
|
||||||
|
No user facing changes.
|
||||||
|
|
||||||
|
## 2.2.0 - 26 Jan 2023
|
||||||
|
|
||||||
- Improve stability when choosing the default version of CodeQL to use in code scanning workflow runs on Actions on GitHub.com. [#1475](https://github.com/github/codeql-action/pull/1475)
|
- Improve stability when choosing the default version of CodeQL to use in code scanning workflow runs on Actions on GitHub.com. [#1475](https://github.com/github/codeql-action/pull/1475)
|
||||||
- This change addresses customer reports of code scanning alerts on GitHub.com being closed and reopened during the rollout of new versions of CodeQL in the GitHub Actions [runner images](https://github.com/actions/runner-images).
|
- This change addresses customer reports of code scanning alerts on GitHub.com being closed and reopened during the rollout of new versions of CodeQL in the GitHub Actions [runner images](https://github.com/actions/runner-images).
|
||||||
|
|||||||
@@ -67,12 +67,8 @@ Here are a few things you can do that will increase the likelihood of your pull
|
|||||||
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
|
This mergeback incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v2" pull request, and bumps the patch version of the CodeQL Action.
|
||||||
|
|
||||||
Approve the mergeback PR and automerge it.
|
Approve the mergeback PR and automerge it.
|
||||||
1. When the "Merge main into releases/v2" pull request is merged into the `releases/v2` branch, the "Update release branch" workflow will create a "Merge releases/v2 into releases/v1" pull request to merge the changes since the last release into the `releases/v1` release branch.
|
|
||||||
This ensures we keep both the `releases/v1` and `releases/v2` release branches up to date and fully supported.
|
|
||||||
|
|
||||||
Review the checklist items in the pull request description.
|
Once the mergeback has been merged to `main`, the release is complete.
|
||||||
Once you've checked off all the items, approve the PR and automerge it.
|
|
||||||
1. Once the mergeback has been merged to `main` and the "Merge releases/v2 into releases/v1" PR has been merged to `releases/v1`, the release is complete.
|
|
||||||
|
|
||||||
## Keeping the PR checks up to date (admin access required)
|
## Keeping the PR checks up to date (admin access required)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# CodeQL Action
|
# CodeQL Action
|
||||||
|
|
||||||
This action runs GitHub's industry-leading semantic code analysis engine, CodeQL, against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
|
This action runs GitHub's industry-leading semantic code analysis engine, [CodeQL](https://codeql.github.com/), against a repository's source code to find security vulnerabilities. It then automatically uploads the results to GitHub so they can be displayed in the repository's security tab. CodeQL runs an extensible set of [queries](https://github.com/github/codeql), which have been developed by the community and the [GitHub Security Lab](https://securitylab.github.com/) to find common vulnerabilities in your code.
|
||||||
|
|
||||||
For a list of recent changes, see the CodeQL Action's [changelog](CHANGELOG.md).
|
For a list of recent changes, see the CodeQL Action's [changelog](CHANGELOG.md).
|
||||||
|
|
||||||
|
|||||||
40
lib/analyze.js
generated
40
lib/analyze.js
generated
@@ -123,16 +123,17 @@ async function finalizeDatabaseCreation(config, threadsFlag, memoryFlag, logger)
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
// Runs queries and creates sarif files in the given folder
|
// Runs queries and creates sarif files in the given folder
|
||||||
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, featureEnablement) {
|
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, features) {
|
||||||
const statusReport = {};
|
const statusReport = {};
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
await util.logCodeScanningConfigInCli(codeql, featureEnablement, logger);
|
const queryFlags = [memoryFlag, threadsFlag];
|
||||||
|
await util.logCodeScanningConfigInCli(codeql, features, logger);
|
||||||
for (const language of config.languages) {
|
for (const language of config.languages) {
|
||||||
const queries = config.queries[language];
|
const queries = config.queries[language];
|
||||||
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
|
const queryFilters = validateQueryFilters(config.originalUserInput["query-filters"]);
|
||||||
const packsWithVersion = config.packs[language] || [];
|
const packsWithVersion = config.packs[language] || [];
|
||||||
try {
|
try {
|
||||||
if (await util.useCodeScanningConfigInCli(codeql, featureEnablement)) {
|
if (await util.useCodeScanningConfigInCli(codeql, features)) {
|
||||||
// If we are using the code scanning config in the CLI,
|
// If we are using the code scanning config in the CLI,
|
||||||
// much of the work needed to generate the query suites
|
// much of the work needed to generate the query suites
|
||||||
// is done in the CLI. We just need to make a single
|
// is done in the CLI. We just need to make a single
|
||||||
@@ -140,7 +141,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
// another to interpret the results.
|
// another to interpret the results.
|
||||||
logger.startGroup(`Running queries for ${language}`);
|
logger.startGroup(`Running queries for ${language}`);
|
||||||
const startTimeBuiltIn = new Date().getTime();
|
const startTimeBuiltIn = new Date().getTime();
|
||||||
await runQueryGroup(language, "all", undefined, undefined);
|
await runQueryGroup(language, "all", undefined, undefined, true);
|
||||||
// TODO should not be using `builtin` here. We should be using `all` instead.
|
// TODO should not be using `builtin` here. We should be using `all` instead.
|
||||||
// The status report does not support `all` yet.
|
// The status report does not support `all` yet.
|
||||||
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
||||||
@@ -164,24 +165,29 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
!hasPackWithCustomQueries) {
|
!hasPackWithCustomQueries) {
|
||||||
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
|
throw new Error(`Unable to analyze ${language} as no queries were selected for this language`);
|
||||||
}
|
}
|
||||||
|
const customQueryIndices = [];
|
||||||
|
for (let i = 0; i < queries.custom.length; ++i) {
|
||||||
|
if (queries.custom[i].queries.length > 0) {
|
||||||
|
customQueryIndices.push(i);
|
||||||
|
}
|
||||||
|
}
|
||||||
logger.startGroup(`Running queries for ${language}`);
|
logger.startGroup(`Running queries for ${language}`);
|
||||||
const querySuitePaths = [];
|
const querySuitePaths = [];
|
||||||
if (queries["builtin"].length > 0) {
|
if (queries.builtin.length > 0) {
|
||||||
const startTimeBuiltIn = new Date().getTime();
|
const startTimeBuiltIn = new Date().getTime();
|
||||||
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries["builtin"], queryFilters), undefined)));
|
querySuitePaths.push((await runQueryGroup(language, "builtin", createQuerySuiteContents(queries.builtin, queryFilters), undefined, customQueryIndices.length === 0 && packsWithVersion.length === 0)));
|
||||||
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
statusReport[`analyze_builtin_queries_${language}_duration_ms`] =
|
||||||
new Date().getTime() - startTimeBuiltIn;
|
new Date().getTime() - startTimeBuiltIn;
|
||||||
}
|
}
|
||||||
const startTimeCustom = new Date().getTime();
|
const startTimeCustom = new Date().getTime();
|
||||||
let ranCustom = false;
|
let ranCustom = false;
|
||||||
for (let i = 0; i < queries["custom"].length; ++i) {
|
for (const i of customQueryIndices) {
|
||||||
if (queries["custom"][i].queries.length > 0) {
|
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries.custom[i].queries, queryFilters), queries.custom[i].searchPath, i === customQueryIndices[customQueryIndices.length - 1] &&
|
||||||
querySuitePaths.push((await runQueryGroup(language, `custom-${i}`, createQuerySuiteContents(queries["custom"][i].queries, queryFilters), queries["custom"][i].searchPath)));
|
packsWithVersion.length === 0)));
|
||||||
ranCustom = true;
|
ranCustom = true;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
if (packsWithVersion.length > 0) {
|
if (packsWithVersion.length > 0) {
|
||||||
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters));
|
querySuitePaths.push(await runQueryPacks(language, "packs", packsWithVersion, queryFilters, true));
|
||||||
ranCustom = true;
|
ranCustom = true;
|
||||||
}
|
}
|
||||||
if (ranCustom) {
|
if (ranCustom) {
|
||||||
@@ -212,13 +218,13 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
return statusReport;
|
return statusReport;
|
||||||
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
|
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, config, features);
|
||||||
}
|
}
|
||||||
async function runPrintLinesOfCode(language) {
|
async function runPrintLinesOfCode(language) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
return await codeql.databasePrintBaseline(databasePath);
|
return await codeql.databasePrintBaseline(databasePath);
|
||||||
}
|
}
|
||||||
async function runQueryGroup(language, type, querySuiteContents, searchPath) {
|
async function runQueryGroup(language, type, querySuiteContents, searchPath, optimizeForLastQueryRun) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
// Pass the queries to codeql using a file instead of using the command
|
// Pass the queries to codeql using a file instead of using the command
|
||||||
// line to avoid command line length restrictions, particularly on windows.
|
// line to avoid command line length restrictions, particularly on windows.
|
||||||
@@ -229,11 +235,11 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
fs.writeFileSync(querySuitePath, querySuiteContents);
|
fs.writeFileSync(querySuitePath, querySuiteContents);
|
||||||
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
|
logger.debug(`Query suite file for ${language}-${type}...\n${querySuiteContents}`);
|
||||||
}
|
}
|
||||||
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, memoryFlag, threadsFlag);
|
await codeql.databaseRunQueries(databasePath, searchPath, querySuitePath, queryFlags, optimizeForLastQueryRun);
|
||||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||||
return querySuitePath;
|
return querySuitePath;
|
||||||
}
|
}
|
||||||
async function runQueryPacks(language, type, packs, queryFilters) {
|
async function runQueryPacks(language, type, packs, queryFilters, optimizeForLastQueryRun) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
for (const pack of packs) {
|
for (const pack of packs) {
|
||||||
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
|
logger.debug(`Running query pack for ${language}-${type}: ${pack}`);
|
||||||
@@ -243,7 +249,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
|||||||
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
const querySuitePath = `${databasePath}-queries-${type}.qls`;
|
||||||
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
|
fs.writeFileSync(querySuitePath, yaml.dump(querySuite));
|
||||||
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
logger.debug(`BQRS results produced for ${language} (queries: ${type})"`);
|
||||||
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, memoryFlag, threadsFlag);
|
await codeql.databaseRunQueries(databasePath, undefined, querySuitePath, queryFlags, optimizeForLastQueryRun);
|
||||||
return querySuitePath;
|
return querySuitePath;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
122
lib/analyze.test.js
generated
122
lib/analyze.test.js
generated
@@ -30,8 +30,10 @@ const fs = __importStar(require("fs"));
|
|||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
const sinon = __importStar(require("sinon"));
|
||||||
const analyze_1 = require("./analyze");
|
const analyze_1 = require("./analyze");
|
||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
|
const feature_flags_1 = require("./feature-flags");
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
@@ -188,6 +190,126 @@ const util = __importStar(require("./util"));
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
function mockCodeQL() {
|
||||||
|
return {
|
||||||
|
getVersion: async () => "2.12.2",
|
||||||
|
databaseRunQueries: sinon.spy(),
|
||||||
|
databaseInterpretResults: async () => "",
|
||||||
|
databasePrintBaseline: async () => "",
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function createBaseConfig(tmpDir) {
|
||||||
|
return {
|
||||||
|
languages: [],
|
||||||
|
queries: {},
|
||||||
|
pathsIgnore: [],
|
||||||
|
paths: [],
|
||||||
|
originalUserInput: {},
|
||||||
|
tempDir: "tempDir",
|
||||||
|
codeQLCmd: "",
|
||||||
|
gitHubVersion: {
|
||||||
|
type: util.GitHubVariant.DOTCOM,
|
||||||
|
},
|
||||||
|
dbLocation: path.resolve(tmpDir, "codeql_databases"),
|
||||||
|
packs: {},
|
||||||
|
debugMode: false,
|
||||||
|
debugArtifactName: util.DEFAULT_DEBUG_ARTIFACT_NAME,
|
||||||
|
debugDatabaseName: util.DEFAULT_DEBUG_DATABASE_NAME,
|
||||||
|
augmentationProperties: {
|
||||||
|
injectedMlQueries: false,
|
||||||
|
packsInputCombines: false,
|
||||||
|
queriesInputCombines: false,
|
||||||
|
},
|
||||||
|
trapCaches: {},
|
||||||
|
trapCacheDownloadTime: 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function createQueryConfig(builtin, custom) {
|
||||||
|
return {
|
||||||
|
builtin,
|
||||||
|
custom: custom.map((c) => ({ searchPath: "/search", queries: [c] })),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
async function runQueriesWithConfig(config, features) {
|
||||||
|
for (const language of config.languages) {
|
||||||
|
fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
|
||||||
|
recursive: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return (0, analyze_1.runQueries)("sarif-folder", "--memFlag", "--addSnippetsFlag", "--threadsFlag", undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)(features));
|
||||||
|
}
|
||||||
|
function getDatabaseRunQueriesCalls(mock) {
|
||||||
|
return mock.databaseRunQueries.getCalls();
|
||||||
|
}
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for one language", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp];
|
||||||
|
config.queries.cpp = createQueryConfig(["foo.ql"], []);
|
||||||
|
await runQueriesWithConfig(config, []);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for two languages", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
|
||||||
|
config.queries.cpp = createQueryConfig(["foo.ql"], []);
|
||||||
|
config.queries.java = createQueryConfig(["bar.ql"], []);
|
||||||
|
await runQueriesWithConfig(config, []);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
|
||||||
|
config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
|
||||||
|
config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
|
||||||
|
await runQueriesWithConfig(config, []);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, true, false, true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for two languages, with custom queries and packs", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
|
||||||
|
config.queries.cpp = createQueryConfig(["foo.ql"], ["c1.ql", "c2.ql"]);
|
||||||
|
config.queries.java = createQueryConfig(["bar.ql"], ["c3.ql"]);
|
||||||
|
config.packs.cpp = ["a/cpp-pack1@0.1.0"];
|
||||||
|
config.packs.java = ["b/java-pack1@0.2.0", "b/java-pack2@0.3.3"];
|
||||||
|
await runQueriesWithConfig(config, []);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [false, false, false, true, false, false, true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for one language, CliConfigFileEnabled", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp];
|
||||||
|
await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("optimizeForLastQueryRun for two languages, CliConfigFileEnabled", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const codeql = mockCodeQL();
|
||||||
|
(0, codeql_1.setCodeQL)(codeql);
|
||||||
|
const config = createBaseConfig(tmpDir);
|
||||||
|
config.languages = [languages_1.Language.cpp, languages_1.Language.java];
|
||||||
|
await runQueriesWithConfig(config, [feature_flags_1.Feature.CliConfigFileEnabled]);
|
||||||
|
t.deepEqual(getDatabaseRunQueriesCalls(codeql).map((c) => c.args[4]), [true, true]);
|
||||||
|
});
|
||||||
|
});
|
||||||
(0, ava_1.default)("validateQueryFilters", (t) => {
|
(0, ava_1.default)("validateQueryFilters", (t) => {
|
||||||
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
|
t.notThrows(() => (0, analyze_1.validateQueryFilters)([]));
|
||||||
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
|
t.notThrows(() => (0, analyze_1.validateQueryFilters)(undefined));
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
72
lib/codeql.js
generated
72
lib/codeql.js
generated
@@ -23,13 +23,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
|
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_TRACING_GLIBC_2_34 = exports.CODEQL_VERSION_NEW_TRACING = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
const actions_util_1 = require("./actions-util");
|
const actions_util_1 = require("./actions-util");
|
||||||
|
const config_utils_1 = require("./config-utils");
|
||||||
const error_matcher_1 = require("./error-matcher");
|
const error_matcher_1 = require("./error-matcher");
|
||||||
|
const feature_flags_1 = require("./feature-flags");
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const setupCodeql = __importStar(require("./setup-codeql"));
|
const setupCodeql = __importStar(require("./setup-codeql"));
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
@@ -94,6 +96,14 @@ exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
|
|||||||
* --extractor-options-verbosity that we need.
|
* --extractor-options-verbosity that we need.
|
||||||
*/
|
*/
|
||||||
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
|
exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
|
||||||
|
/**
|
||||||
|
* Versions 2.11.1+ of the CodeQL Bundle include a `security-experimental` built-in query suite for each language.
|
||||||
|
*/
|
||||||
|
exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = "2.12.1";
|
||||||
|
/**
|
||||||
|
* Versions 2.12.4+ of the CodeQL CLI support the `--qlconfig-file` flag in calls to `database init`.
|
||||||
|
*/
|
||||||
|
exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = "2.12.4";
|
||||||
/**
|
/**
|
||||||
* Set up CodeQL CLI access.
|
* Set up CodeQL CLI access.
|
||||||
*
|
*
|
||||||
@@ -101,16 +111,15 @@ exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = "2.10.3";
|
|||||||
* @param apiDetails
|
* @param apiDetails
|
||||||
* @param tempDir
|
* @param tempDir
|
||||||
* @param variant
|
* @param variant
|
||||||
* @param bypassToolcache
|
|
||||||
* @param defaultCliVersion
|
* @param defaultCliVersion
|
||||||
* @param logger
|
* @param logger
|
||||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||||
* version requirement. Must be set to true outside tests.
|
* version requirement. Must be set to true outside tests.
|
||||||
* @returns a { CodeQL, toolsVersion } object.
|
* @returns a { CodeQL, toolsVersion } object.
|
||||||
*/
|
*/
|
||||||
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, checkVersion) {
|
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, checkVersion) {
|
||||||
try {
|
try {
|
||||||
const { codeqlFolder, toolsDownloadDurationMs, toolsSource, toolsVersion } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger);
|
const { codeqlFolder, toolsDownloadDurationMs, toolsSource, toolsVersion } = await setupCodeql.setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger);
|
||||||
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
let codeqlCmd = path.join(codeqlFolder, "codeql", "codeql");
|
||||||
if (process.platform === "win32") {
|
if (process.platform === "win32") {
|
||||||
codeqlCmd += ".exe";
|
codeqlCmd += ".exe";
|
||||||
@@ -299,7 +308,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
...getExtraOptionsFromEnv(["database", "init"]),
|
...getExtraOptionsFromEnv(["database", "init"]),
|
||||||
]);
|
]);
|
||||||
},
|
},
|
||||||
async databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger) {
|
async databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger) {
|
||||||
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
const extraArgs = config.languages.map((language) => `--language=${language}`);
|
||||||
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
|
if (config.languages.filter((l) => (0, languages_1.isTracedLanguage)(l)).length > 0) {
|
||||||
extraArgs.push("--begin-tracing");
|
extraArgs.push("--begin-tracing");
|
||||||
@@ -317,17 +326,21 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
extraArgs.push("--no-internal-use-lua-tracing");
|
extraArgs.push("--no-internal-use-lua-tracing");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// A config file is only generated if the CliConfigFileEnabled feature flag is enabled.
|
// A code scanning config file is only generated if the CliConfigFileEnabled feature flag is enabled.
|
||||||
const configLocation = await generateCodeScanningConfig(codeql, config, featureEnablement, logger);
|
const codeScanningConfigFile = await generateCodeScanningConfig(codeql, config, features, logger);
|
||||||
// Only pass external repository token if a config file is going to be parsed by the CLI.
|
// Only pass external repository token if a config file is going to be parsed by the CLI.
|
||||||
let externalRepositoryToken;
|
let externalRepositoryToken;
|
||||||
if (configLocation) {
|
if (codeScanningConfigFile) {
|
||||||
extraArgs.push(`--codescanning-config=${configLocation}`);
|
|
||||||
externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
|
externalRepositoryToken = (0, actions_util_1.getOptionalInput)("external-repository-token");
|
||||||
|
extraArgs.push(`--codescanning-config=${codeScanningConfigFile}`);
|
||||||
if (externalRepositoryToken) {
|
if (externalRepositoryToken) {
|
||||||
extraArgs.push("--external-repository-token-stdin");
|
extraArgs.push("--external-repository-token-stdin");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (qlconfigFile !== undefined &&
|
||||||
|
(await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_INIT_WITH_QLCONFIG))) {
|
||||||
|
extraArgs.push(`--qlconfig-file=${qlconfigFile}`);
|
||||||
|
}
|
||||||
await runTool(cmd, [
|
await runTool(cmd, [
|
||||||
"database",
|
"database",
|
||||||
"init",
|
"init",
|
||||||
@@ -469,17 +482,20 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
throw new Error(`Unexpected output from codeql resolve queries: ${e}`);
|
throw new Error(`Unexpected output from codeql resolve queries: ${e}`);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, memoryFlag, threadsFlag) {
|
async databaseRunQueries(databasePath, extraSearchPath, querySuitePath, flags, optimizeForLastQueryRun) {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
"database",
|
"database",
|
||||||
"run-queries",
|
"run-queries",
|
||||||
memoryFlag,
|
...flags,
|
||||||
threadsFlag,
|
|
||||||
databasePath,
|
databasePath,
|
||||||
"--min-disk-free=1024",
|
"--min-disk-free=1024",
|
||||||
"-v",
|
"-v",
|
||||||
...getExtraOptionsFromEnv(["database", "run-queries"]),
|
...getExtraOptionsFromEnv(["database", "run-queries"]),
|
||||||
];
|
];
|
||||||
|
if (optimizeForLastQueryRun &&
|
||||||
|
(await util.supportExpectDiscardedCache(this))) {
|
||||||
|
codeqlArgs.push("--expect-discarded-cache");
|
||||||
|
}
|
||||||
if (extraSearchPath !== undefined) {
|
if (extraSearchPath !== undefined) {
|
||||||
codeqlArgs.push("--additional-packs", extraSearchPath);
|
codeqlArgs.push("--additional-packs", extraSearchPath);
|
||||||
}
|
}
|
||||||
@@ -488,7 +504,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
}
|
}
|
||||||
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||||
},
|
},
|
||||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
|
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, config, features) {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
"database",
|
"database",
|
||||||
"interpret-results",
|
"interpret-results",
|
||||||
@@ -500,6 +516,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
"--print-diagnostics-summary",
|
"--print-diagnostics-summary",
|
||||||
"--print-metrics-summary",
|
"--print-metrics-summary",
|
||||||
"--sarif-group-rules-by-pack",
|
"--sarif-group-rules-by-pack",
|
||||||
|
...(await getCodeScanningConfigExportArguments(config, this, features)),
|
||||||
...getExtraOptionsFromEnv(["database", "interpret-results"]),
|
...getExtraOptionsFromEnv(["database", "interpret-results"]),
|
||||||
];
|
];
|
||||||
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_CUSTOM_QUERY_HELP))
|
||||||
@@ -592,12 +609,13 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
|||||||
];
|
];
|
||||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||||
},
|
},
|
||||||
async diagnosticsExport(sarifFile, automationDetailsId) {
|
async diagnosticsExport(sarifFile, automationDetailsId, config, features) {
|
||||||
const args = [
|
const args = [
|
||||||
"diagnostics",
|
"diagnostics",
|
||||||
"export",
|
"export",
|
||||||
"--format=sarif-latest",
|
"--format=sarif-latest",
|
||||||
`--output=${sarifFile}`,
|
`--output=${sarifFile}`,
|
||||||
|
...(await getCodeScanningConfigExportArguments(config, this, features)),
|
||||||
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||||
];
|
];
|
||||||
if (automationDetailsId !== undefined) {
|
if (automationDetailsId !== undefined) {
|
||||||
@@ -708,11 +726,11 @@ async function runTool(cmd, args = [], opts = {}) {
|
|||||||
* @param config The configuration to use.
|
* @param config The configuration to use.
|
||||||
* @returns the path to the generated user configuration file.
|
* @returns the path to the generated user configuration file.
|
||||||
*/
|
*/
|
||||||
async function generateCodeScanningConfig(codeql, config, featureEnablement, logger) {
|
async function generateCodeScanningConfig(codeql, config, features, logger) {
|
||||||
if (!(await util.useCodeScanningConfigInCli(codeql, featureEnablement))) {
|
if (!(await util.useCodeScanningConfigInCli(codeql, features))) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const configLocation = path.resolve(config.tempDir, "user-config.yaml");
|
const codeScanningConfigFile = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
|
||||||
// make a copy so we can modify it
|
// make a copy so we can modify it
|
||||||
const augmentedConfig = cloneObject(config.originalUserInput);
|
const augmentedConfig = cloneObject(config.originalUserInput);
|
||||||
// Inject the queries from the input
|
// Inject the queries from the input
|
||||||
@@ -766,14 +784,28 @@ async function generateCodeScanningConfig(codeql, config, featureEnablement, log
|
|||||||
augmentedConfig.packs["javascript"].push(packString);
|
augmentedConfig.packs["javascript"].push(packString);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
logger.info(`Writing augmented user configuration file to ${configLocation}`);
|
logger.info(`Writing augmented user configuration file to ${codeScanningConfigFile}`);
|
||||||
logger.startGroup("Augmented user configuration file contents");
|
logger.startGroup("Augmented user configuration file contents");
|
||||||
logger.info(yaml.dump(augmentedConfig));
|
logger.info(yaml.dump(augmentedConfig));
|
||||||
logger.endGroup();
|
logger.endGroup();
|
||||||
fs.writeFileSync(configLocation, yaml.dump(augmentedConfig));
|
fs.writeFileSync(codeScanningConfigFile, yaml.dump(augmentedConfig));
|
||||||
return configLocation;
|
return codeScanningConfigFile;
|
||||||
}
|
}
|
||||||
function cloneObject(obj) {
|
function cloneObject(obj) {
|
||||||
return JSON.parse(JSON.stringify(obj));
|
return JSON.parse(JSON.stringify(obj));
|
||||||
}
|
}
|
||||||
|
/**
|
||||||
|
* Gets arguments for passing the code scanning configuration file to interpretation commands like
|
||||||
|
* `codeql database interpret-results` and `codeql database export-diagnostics`.
|
||||||
|
*
|
||||||
|
* Returns an empty list if a code scanning configuration file was not generated by the CLI.
|
||||||
|
*/
|
||||||
|
async function getCodeScanningConfigExportArguments(config, codeql, features) {
|
||||||
|
const codeScanningConfigPath = (0, config_utils_1.getGeneratedCodeScanningConfigPath)(config);
|
||||||
|
if (fs.existsSync(codeScanningConfigPath) &&
|
||||||
|
(await features.getValue(feature_flags_1.Feature.ExportCodeScanningConfigEnabled, codeql))) {
|
||||||
|
return ["--sarif-codescanning-config", codeScanningConfigPath];
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
//# sourceMappingURL=codeql.js.map
|
//# sourceMappingURL=codeql.js.map
|
||||||
File diff suppressed because one or more lines are too long
271
lib/codeql.test.js
generated
271
lib/codeql.test.js
generated
@@ -97,7 +97,7 @@ ava_1.default.beforeEach(() => {
|
|||||||
* @returns the download URL for the bundle. This can be passed to the tools parameter of
|
* @returns the download URL for the bundle. This can be passed to the tools parameter of
|
||||||
* `codeql.setupCodeQL`.
|
* `codeql.setupCodeQL`.
|
||||||
*/
|
*/
|
||||||
function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, }) {
|
function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, repo = "github/codeql-action", platformSpecific = true, tagName, }) {
|
||||||
const platform = process.platform === "win32"
|
const platform = process.platform === "win32"
|
||||||
? "win64"
|
? "win64"
|
||||||
: process.platform === "linux"
|
: process.platform === "linux"
|
||||||
@@ -105,7 +105,7 @@ function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, })
|
|||||||
: "osx64";
|
: "osx64";
|
||||||
const baseUrl = apiDetails?.url ?? "https://example.com";
|
const baseUrl = apiDetails?.url ?? "https://example.com";
|
||||||
const relativeUrl = apiDetails
|
const relativeUrl = apiDetails
|
||||||
? `/github/codeql-action/releases/download/${tagName}/codeql-bundle-${platform}.tar.gz`
|
? `/${repo}/releases/download/${tagName}/codeql-bundle${platformSpecific ? `-${platform}` : ""}.tar.gz`
|
||||||
: `/download/${tagName}/codeql-bundle.tar.gz`;
|
: `/download/${tagName}/codeql-bundle.tar.gz`;
|
||||||
(0, nock_1.default)(baseUrl)
|
(0, nock_1.default)(baseUrl)
|
||||||
.get(relativeUrl)
|
.get(relativeUrl)
|
||||||
@@ -114,7 +114,7 @@ function mockDownloadApi({ apiDetails = sampleApiDetails, isPinned, tagName, })
|
|||||||
}
|
}
|
||||||
async function installIntoToolcache({ apiDetails = sampleApiDetails, cliVersion, isPinned, tagName, tmpDir, }) {
|
async function installIntoToolcache({ apiDetails = sampleApiDetails, cliVersion, isPinned, tagName, tmpDir, }) {
|
||||||
const url = mockDownloadApi({ apiDetails, isPinned, tagName });
|
const url = mockDownloadApi({ apiDetails, isPinned, tagName });
|
||||||
await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, false, cliVersion !== undefined
|
await codeql.setupCodeQL(cliVersion !== undefined ? undefined : url, apiDetails, tmpDir, util.GitHubVariant.GHES, cliVersion !== undefined
|
||||||
? { cliVersion, tagName, variant: util.GitHubVariant.GHES }
|
? { cliVersion, tagName, variant: util.GitHubVariant.GHES }
|
||||||
: SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
: SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
}
|
}
|
||||||
@@ -153,11 +153,11 @@ function mockApiDetails(apiDetails) {
|
|||||||
tagName: `codeql-bundle-${version}`,
|
tagName: `codeql-bundle-${version}`,
|
||||||
isPinned: false,
|
isPinned: false,
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
|
||||||
t.is(result.toolsVersion, `0.0.0-${version}`);
|
t.is(result.toolsVersion, `0.0.0-${version}`);
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.is(typeof result.toolsDownloadDurationMs, "number");
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
}
|
}
|
||||||
t.is(toolcache.findAllVersions("CodeQL").length, 2);
|
t.is(toolcache.findAllVersions("CodeQL").length, 2);
|
||||||
});
|
});
|
||||||
@@ -173,11 +173,11 @@ function mockApiDetails(apiDetails) {
|
|||||||
const url = mockDownloadApi({
|
const url = mockDownloadApi({
|
||||||
tagName: "codeql-bundle-20200610",
|
tagName: "codeql-bundle-20200610",
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
|
||||||
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
|
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.not(result.toolsDownloadDurationMs, undefined);
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
const EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [
|
const EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [
|
||||||
@@ -207,75 +207,59 @@ for (const { cliVersion, expectedToolcacheVersion, } of EXPLICITLY_REQUESTED_BUN
|
|||||||
const url = mockDownloadApi({
|
const url = mockDownloadApi({
|
||||||
tagName: "codeql-bundle-20200610",
|
tagName: "codeql-bundle-20200610",
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
const result = await codeql.setupCodeQL(url, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.assert(releaseApiMock.isDone(), "Releases API should have been called");
|
t.assert(releaseApiMock.isDone(), "Releases API should have been called");
|
||||||
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
|
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
|
||||||
t.deepEqual(result.toolsVersion, cliVersion);
|
t.deepEqual(result.toolsVersion, cliVersion);
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.not(result.toolsDownloadDurationMs, undefined);
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
for (const { isCached, tagName, toolcacheCliVersion } of [
|
for (const { githubReleases, toolcacheVersion } of [
|
||||||
|
// Test that we use the tools from the toolcache when `SAMPLE_DEFAULT_CLI_VERSION` is requested
|
||||||
|
// and `SAMPLE_DEFAULT_CLI_VERSION-` is in the toolcache.
|
||||||
{
|
{
|
||||||
isCached: true,
|
toolcacheVersion: SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
|
||||||
tagName: "codeql-bundle-20230101",
|
|
||||||
toolcacheCliVersion: SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
isCached: true,
|
githubReleases: {
|
||||||
// By leaving toolcacheCliVersion undefined, the bundle will be installed
|
"codeql-bundle-20230101": `cli-version-${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}.txt`,
|
||||||
// into the toolcache as `${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`.
|
},
|
||||||
// This lets us test that `x.y.z-yyyymmdd` toolcache versions are used if an
|
toolcacheVersion: "0.0.0-20230101",
|
||||||
// `x.y.z` version isn't in the toolcache.
|
|
||||||
tagName: `codeql-bundle-${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
isCached: false,
|
toolcacheVersion: `${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
|
||||||
tagName: "codeql-bundle-20230101",
|
|
||||||
},
|
},
|
||||||
]) {
|
]) {
|
||||||
(0, ava_1.default)(`uses default version on Dotcom when default version bundle ${tagName} is ${isCached ? "" : "not "}cached`, async (t) => {
|
(0, ava_1.default)(`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
|
||||||
|
`${toolcacheVersion} is installed`, async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
if (isCached) {
|
sinon
|
||||||
await installIntoToolcache({
|
.stub(toolcache, "find")
|
||||||
cliVersion: toolcacheCliVersion,
|
.withArgs("CodeQL", toolcacheVersion)
|
||||||
tagName,
|
.returns("path/to/cached/codeql");
|
||||||
isPinned: true,
|
sinon.stub(toolcache, "findAllVersions").returns([toolcacheVersion]);
|
||||||
tmpDir,
|
if (githubReleases) {
|
||||||
});
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
mockDownloadApi({
|
|
||||||
tagName,
|
|
||||||
});
|
|
||||||
sinon.stub(api, "getApiClient").value(() => ({
|
sinon.stub(api, "getApiClient").value(() => ({
|
||||||
repos: {
|
repos: {
|
||||||
listReleases: sinon.stub().resolves(undefined),
|
listReleases: sinon.stub().resolves(undefined),
|
||||||
},
|
},
|
||||||
paginate: sinon.stub().resolves([
|
paginate: sinon.stub().resolves(Object.entries(githubReleases).map(([releaseTagName, cliVersionMarkerFile]) => ({
|
||||||
{
|
assets: [
|
||||||
assets: [
|
{
|
||||||
{
|
name: cliVersionMarkerFile,
|
||||||
name: "cli-version-2.0.0.txt",
|
},
|
||||||
},
|
],
|
||||||
],
|
tag_name: releaseTagName,
|
||||||
tag_name: tagName,
|
}))),
|
||||||
},
|
|
||||||
]),
|
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.is(result.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
|
t.is(result.toolsVersion, SAMPLE_DEFAULT_CLI_VERSION.cliVersion);
|
||||||
if (isCached) {
|
t.is(result.toolsSource, init_1.ToolsSource.Toolcache);
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Toolcache);
|
t.is(result.toolsDownloadDurationMs, undefined);
|
||||||
t.is(result.toolsDownloadDurationMs, undefined);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
|
||||||
t.is(typeof result.toolsDownloadDurationMs, "number");
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -288,7 +272,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
isPinned: true,
|
isPinned: true,
|
||||||
tmpDir,
|
tmpDir,
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
|
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, {
|
||||||
cliVersion: defaults.cliVersion,
|
cliVersion: defaults.cliVersion,
|
||||||
tagName: defaults.bundleVersion,
|
tagName: defaults.bundleVersion,
|
||||||
variant,
|
variant,
|
||||||
@@ -311,14 +295,14 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
mockDownloadApi({
|
mockDownloadApi({
|
||||||
tagName: defaults.bundleVersion,
|
tagName: defaults.bundleVersion,
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, false, {
|
const result = await codeql.setupCodeQL(undefined, sampleApiDetails, tmpDir, variant, {
|
||||||
cliVersion: defaults.cliVersion,
|
cliVersion: defaults.cliVersion,
|
||||||
tagName: defaults.bundleVersion,
|
tagName: defaults.bundleVersion,
|
||||||
variant,
|
variant,
|
||||||
}, (0, logging_1.getRunnerLogger)(true), false);
|
}, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.is(typeof result.toolsDownloadDurationMs, "number");
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
@@ -335,49 +319,81 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
mockDownloadApi({
|
mockDownloadApi({
|
||||||
tagName: defaults.bundleVersion,
|
tagName: defaults.bundleVersion,
|
||||||
});
|
});
|
||||||
const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, false, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
const result = await codeql.setupCodeQL("latest", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
t.deepEqual(result.toolsVersion, defaults.cliVersion);
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.is(typeof result.toolsDownloadDurationMs, "number");
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 2);
|
t.is(cachedVersions.length, 2);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("download codeql bundle from github ae endpoint", async (t) => {
|
for (const isBundleVersionInUrl of [true, false]) {
|
||||||
|
const inclusionString = isBundleVersionInUrl
|
||||||
|
? "includes"
|
||||||
|
: "does not include";
|
||||||
|
(0, ava_1.default)(`download codeql bundle from github ae endpoint (URL ${inclusionString} bundle version)`, async (t) => {
|
||||||
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
|
const bundleAssetID = 10;
|
||||||
|
const platform = process.platform === "win32"
|
||||||
|
? "win64"
|
||||||
|
: process.platform === "linux"
|
||||||
|
? "linux64"
|
||||||
|
: "osx64";
|
||||||
|
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
||||||
|
const eventualDownloadUrl = isBundleVersionInUrl
|
||||||
|
? `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`
|
||||||
|
: `https://example.githubenterprise.com/api/v3/repos/github/codeql-action/releases/assets/${bundleAssetID}`;
|
||||||
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
||||||
|
.reply(200, {
|
||||||
|
assets: { [codeQLBundleName]: bundleAssetID },
|
||||||
|
});
|
||||||
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
|
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
||||||
|
.reply(200, {
|
||||||
|
url: eventualDownloadUrl,
|
||||||
|
});
|
||||||
|
(0, nock_1.default)("https://example.githubenterprise.com")
|
||||||
|
.get(eventualDownloadUrl.replace("https://example.githubenterprise.com", ""))
|
||||||
|
.replyWithFile(200, path_1.default.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
||||||
|
mockApiDetails(sampleGHAEApiDetails);
|
||||||
|
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
|
||||||
|
process.env["GITHUB_ACTION_REPOSITORY"] = "github/codeql-action";
|
||||||
|
const result = await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, {
|
||||||
|
cliVersion: defaults.cliVersion,
|
||||||
|
tagName: defaults.bundleVersion,
|
||||||
|
variant: util.GitHubVariant.GHAE,
|
||||||
|
}, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
|
t.assert(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
|
t.is(cachedVersions.length, 1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
(0, ava_1.default)("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
|
||||||
await util.withTmpDir(async (tmpDir) => {
|
await util.withTmpDir(async (tmpDir) => {
|
||||||
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
(0, testing_utils_1.setupActionsVars)(tmpDir, tmpDir);
|
||||||
const bundleAssetID = 10;
|
mockApiDetails(sampleApiDetails);
|
||||||
const platform = process.platform === "win32"
|
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
|
||||||
? "win64"
|
const releasesApiMock = mockReleaseApi({
|
||||||
: process.platform === "linux"
|
assetNames: ["cli-version-2.12.2.txt"],
|
||||||
? "linux64"
|
tagName: "codeql-bundle-20230203",
|
||||||
: "osx64";
|
|
||||||
const codeQLBundleName = `codeql-bundle-${platform}.tar.gz`;
|
|
||||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/find/${defaults.bundleVersion}`)
|
|
||||||
.reply(200, {
|
|
||||||
assets: { [codeQLBundleName]: bundleAssetID },
|
|
||||||
});
|
});
|
||||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
mockDownloadApi({
|
||||||
.get(`/api/v3/enterprise/code-scanning/codeql-bundle/download/${bundleAssetID}`)
|
repo: "dsp-testing/codeql-cli-nightlies",
|
||||||
.reply(200, {
|
platformSpecific: false,
|
||||||
url: `https://example.githubenterprise.com/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`,
|
tagName: "codeql-bundle-20230203",
|
||||||
});
|
});
|
||||||
(0, nock_1.default)("https://example.githubenterprise.com")
|
const result = await codeql.setupCodeQL("https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", sampleApiDetails, tmpDir, util.GitHubVariant.DOTCOM, SAMPLE_DEFAULT_CLI_VERSION, (0, logging_1.getRunnerLogger)(true), false);
|
||||||
.get(`/github/codeql-action/releases/download/${defaults.bundleVersion}/${codeQLBundleName}`)
|
t.is(result.toolsVersion, "0.0.0-20230203");
|
||||||
.replyWithFile(200, path_1.default.join(__dirname, `/../src/testdata/codeql-bundle-pinned.tar.gz`));
|
|
||||||
mockApiDetails(sampleGHAEApiDetails);
|
|
||||||
sinon.stub(actionsUtil, "isRunningLocalAction").returns(false);
|
|
||||||
process.env["GITHUB_ACTION_REPOSITORY"] = "github/codeql-action";
|
|
||||||
const result = await codeql.setupCodeQL(undefined, sampleGHAEApiDetails, tmpDir, util.GitHubVariant.GHAE, false, {
|
|
||||||
cliVersion: defaults.cliVersion,
|
|
||||||
tagName: defaults.bundleVersion,
|
|
||||||
variant: util.GitHubVariant.GHAE,
|
|
||||||
}, (0, logging_1.getRunnerLogger)(true), false);
|
|
||||||
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
t.is(result.toolsSource, init_1.ToolsSource.Download);
|
||||||
t.is(typeof result.toolsDownloadDurationMs, "number");
|
t.true(Number.isInteger(result.toolsDownloadDurationMs));
|
||||||
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
const cachedVersions = toolcache.findAllVersions("CodeQL");
|
||||||
t.is(cachedVersions.length, 1);
|
t.is(cachedVersions.length, 1);
|
||||||
|
t.is(cachedVersions[0], "0.0.0-20230203");
|
||||||
|
t.false(releasesApiMock.isDone());
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
(0, ava_1.default)("getExtraOptions works for explicit paths", (t) => {
|
||||||
@@ -408,7 +424,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||||
// safeWhich throws because of the test CodeQL object.
|
// safeWhich throws because of the test CodeQL object.
|
||||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||||
@@ -417,7 +433,7 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||||
// safeWhich throws because of the test CodeQL object.
|
// safeWhich throws because of the test CodeQL object.
|
||||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||||
@@ -436,11 +452,11 @@ for (const variant of [util.GitHubVariant.GHAE, util.GitHubVariant.GHES]) {
|
|||||||
packsInputCombines: false,
|
packsInputCombines: false,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should NOT have used an config file
|
// should NOT have used an config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||||
t.falsy(configArg, "Should have injected a codescanning config");
|
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
// Test macro for ensuring different variants of injected augmented configurations
|
// Test macro for ensuring different variants of injected augmented configurations
|
||||||
@@ -458,7 +474,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
tempDir,
|
tempDir,
|
||||||
augmentationProperties,
|
augmentationProperties,
|
||||||
};
|
};
|
||||||
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster(thisStubConfig, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should have used an config file
|
// should have used an config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
||||||
@@ -649,24 +665,67 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
queries: [],
|
queries: [],
|
||||||
},
|
},
|
||||||
}, {});
|
}, {});
|
||||||
(0, ava_1.default)("does not use injected config", async (t) => {
|
(0, ava_1.default)("does not pass a code scanning config or qlconfig file to the CLI when CLI config passing is disabled", async (t) => {
|
||||||
const origCODEQL_PASS_CONFIG_TO_CLI = process.env.CODEQL_PASS_CONFIG_TO_CLI;
|
await util.withTmpDir(async (tempDir) => {
|
||||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = "false";
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
try {
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
// stubbed version doesn't matter. It just needs to be valid semver.
|
||||||
|
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
// should not have used a config file
|
||||||
|
const hasConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
|
t.false(hasConfigArg, "Should NOT have injected a codescanning config");
|
||||||
|
// should not have passed a qlconfig file
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "Should NOT have passed a qlconfig file");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("passes a code scanning config AND qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon
|
sinon
|
||||||
.stub(codeqlObject, "getVersion")
|
.stub(codeqlObject, "getVersion")
|
||||||
.resolves(feature_flags_1.featureConfig[feature_flags_1.Feature.CliConfigFileEnabled].minimumVersion);
|
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||||
await codeqlObject.databaseInitCluster(stubConfig, "", undefined, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
const args = runnerConstructorStub.firstCall.args[1];
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
// should have used an config file
|
// should have used a config file
|
||||||
const configArg = args.find((arg) => arg.startsWith("--codescanning-config="));
|
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
t.falsy(configArg, "Should NOT have injected a codescanning config");
|
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
|
||||||
}
|
// should have passed a qlconfig file
|
||||||
finally {
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
|
||||||
}
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("passes a code scanning config BUT NOT a qlconfig to the CLI when CLI config passing is enabled", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon.stub(codeqlObject, "getVersion").resolves("2.12.2");
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), "/path/to/qlconfig.yml", (0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
// should have used a config file
|
||||||
|
const hasCodeScanningConfigArg = args.some((arg) => arg.startsWith("--codescanning-config="));
|
||||||
|
t.true(hasCodeScanningConfigArg, "Should have injected a codescanning config");
|
||||||
|
// should not have passed a qlconfig file
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("does not pass a qlconfig to the CLI when it is undefined", async (t) => {
|
||||||
|
await util.withTmpDir(async (tempDir) => {
|
||||||
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon
|
||||||
|
.stub(codeqlObject, "getVersion")
|
||||||
|
.resolves(codeql.CODEQL_VERSION_INIT_WITH_QLCONFIG);
|
||||||
|
await codeqlObject.databaseInitCluster({ ...stubConfig, tempDir }, "", undefined, (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.CliConfigFileEnabled]), undefined, // undefined qlconfigFile
|
||||||
|
(0, logging_1.getRunnerLogger)(true));
|
||||||
|
const args = runnerConstructorStub.firstCall.args[1];
|
||||||
|
const hasQlconfigArg = args.some((arg) => arg.startsWith("--qlconfig-file="));
|
||||||
|
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
|
||||||
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
@@ -674,7 +733,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||||
// safeWhich throws because of the test CodeQL object.
|
// safeWhich throws because of the test CodeQL object.
|
||||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||||
@@ -683,7 +742,7 @@ const injectedConfigMacro = ava_1.default.macro({
|
|||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||||
// safeWhich throws because of the test CodeQL object.
|
// safeWhich throws because of the test CodeQL object.
|
||||||
sinon.stub(safeWhich, "safeWhich").resolves("");
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]));
|
||||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||||
});
|
});
|
||||||
function stubToolRunnerConstructor() {
|
function stubToolRunnerConstructor() {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
130
lib/config-utils.js
generated
130
lib/config-utils.js
generated
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
exports.getGeneratedCodeScanningConfigPath = exports.wrapEnvironment = exports.generateRegistries = exports.downloadPacks = exports.getConfig = exports.getPathToParsedConfigFile = exports.initConfig = exports.parsePacks = exports.validatePackSpecification = exports.prettyPrintPack = exports.parsePacksSpecification = exports.parsePacksFromConfig = exports.calculateAugmentation = exports.getDefaultConfig = exports.getRawLanguages = exports.getLanguages = exports.getLanguagesInRepo = exports.getUnknownLanguagesError = exports.getNoLanguagesError = exports.getConfigFileDirectoryGivenMessage = exports.getConfigFileFormatInvalidMessage = exports.getConfigFileRepoFormatInvalidMessage = exports.getConfigFileDoesNotExistErrorMessage = exports.getConfigFileOutsideWorkspaceErrorMessage = exports.getLocalPathDoesNotExist = exports.getLocalPathOutsideOfRepository = exports.getPacksStrInvalid = exports.getPacksInvalid = exports.getPacksInvalidSplit = exports.getPathsInvalid = exports.getPathsIgnoreInvalid = exports.getQueryUsesInvalid = exports.getQueriesMissingUses = exports.getQueriesInvalid = exports.getDisableDefaultQueriesInvalid = exports.getNameInvalid = exports.validateAndSanitisePath = exports.defaultAugmentationProperties = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const perf_hooks_1 = require("perf_hooks");
|
const perf_hooks_1 = require("perf_hooks");
|
||||||
@@ -131,18 +131,28 @@ async function addDefaultQueries(codeQL, languages, resultMap) {
|
|||||||
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
await runResolveQueries(codeQL, resultMap, suites, undefined);
|
||||||
}
|
}
|
||||||
// The set of acceptable values for built-in suites from the codeql bundle
|
// The set of acceptable values for built-in suites from the codeql bundle
|
||||||
const builtinSuites = ["security-extended", "security-and-quality"];
|
const builtinSuites = [
|
||||||
|
"security-experimental",
|
||||||
|
"security-extended",
|
||||||
|
"security-and-quality",
|
||||||
|
];
|
||||||
/**
|
/**
|
||||||
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
* Determine the set of queries associated with suiteName's suites and add them to resultMap.
|
||||||
* Throws an error if suiteName is not a valid builtin suite.
|
* Throws an error if suiteName is not a valid builtin suite.
|
||||||
* May inject ML queries, and the return value will declare if this was done.
|
* May inject ML queries, and the return value will declare if this was done.
|
||||||
*/
|
*/
|
||||||
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, featureEnablement, configFile) {
|
async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suiteName, features, configFile) {
|
||||||
let injectedMlQueries = false;
|
let injectedMlQueries = false;
|
||||||
const found = builtinSuites.find((suite) => suite === suiteName);
|
const found = builtinSuites.find((suite) => suite === suiteName);
|
||||||
if (!found) {
|
if (!found) {
|
||||||
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
throw new Error(getQueryUsesInvalid(configFile, suiteName));
|
||||||
}
|
}
|
||||||
|
if (suiteName === "security-experimental" &&
|
||||||
|
!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE))) {
|
||||||
|
throw new Error(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
|
||||||
|
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE}. Please upgrade to CodeQL CLI version
|
||||||
|
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE} or later.`);
|
||||||
|
}
|
||||||
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
|
||||||
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
|
||||||
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
// pack, then add the ML-powered query pack so that we run ML-powered queries.
|
||||||
@@ -151,9 +161,11 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
|
|||||||
(process.platform !== "win32" ||
|
(process.platform !== "win32" ||
|
||||||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
|
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
|
||||||
languages.includes("javascript") &&
|
languages.includes("javascript") &&
|
||||||
(found === "security-extended" || found === "security-and-quality") &&
|
(found === "security-experimental" ||
|
||||||
|
found === "security-extended" ||
|
||||||
|
found === "security-and-quality") &&
|
||||||
!packs.javascript?.some(isMlPoweredJsQueriesPack) &&
|
!packs.javascript?.some(isMlPoweredJsQueriesPack) &&
|
||||||
(await featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
(await features.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, codeQL))) {
|
||||||
if (!packs.javascript) {
|
if (!packs.javascript) {
|
||||||
packs.javascript = [];
|
packs.javascript = [];
|
||||||
}
|
}
|
||||||
@@ -228,7 +240,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
|
|||||||
*
|
*
|
||||||
* @returns whether or not we injected ML queries into the packs
|
* @returns whether or not we injected ML queries into the packs
|
||||||
*/
|
*/
|
||||||
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile) {
|
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, features, logger, configFile) {
|
||||||
queryUses = queryUses.trim();
|
queryUses = queryUses.trim();
|
||||||
if (queryUses === "") {
|
if (queryUses === "") {
|
||||||
throw new Error(getQueryUsesInvalid(configFile));
|
throw new Error(getQueryUsesInvalid(configFile));
|
||||||
@@ -240,12 +252,12 @@ async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, te
|
|||||||
}
|
}
|
||||||
// Check for one of the builtin suites
|
// Check for one of the builtin suites
|
||||||
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
if (queryUses.indexOf("/") === -1 && queryUses.indexOf("@") === -1) {
|
||||||
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, featureEnablement, configFile);
|
return await addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, queryUses, features, configFile);
|
||||||
}
|
}
|
||||||
// Otherwise, must be a reference to another repo.
|
// Otherwise, must be a reference to another repo.
|
||||||
// If config parsing is handled in CLI, then this repo will be downloaded
|
// If config parsing is handled in CLI, then this repo will be downloaded
|
||||||
// later by the CLI.
|
// later by the CLI.
|
||||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
||||||
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
await addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
@@ -491,13 +503,13 @@ async function getRawLanguages(languagesInput, repository, logger) {
|
|||||||
return { rawLanguages, autodetected };
|
return { rawLanguages, autodetected };
|
||||||
}
|
}
|
||||||
exports.getRawLanguages = getRawLanguages;
|
exports.getRawLanguages = getRawLanguages;
|
||||||
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger) {
|
async function addQueriesAndPacksFromWorkflow(codeQL, queriesInput, languages, resultMap, packs, tempDir, workspacePath, apiDetails, features, logger) {
|
||||||
let injectedMlQueries = false;
|
let injectedMlQueries = false;
|
||||||
queriesInput = queriesInput.trim();
|
queriesInput = queriesInput.trim();
|
||||||
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
// "+" means "don't override config file" - see shouldAddConfigFileQueries
|
||||||
queriesInput = queriesInput.replace(/^\+/, "");
|
queriesInput = queriesInput.replace(/^\+/, "");
|
||||||
for (const query of queriesInput.split(",")) {
|
for (const query of queriesInput.split(",")) {
|
||||||
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
const didInject = await parseQueryUses(languages, codeQL, resultMap, packs, query, tempDir, workspacePath, apiDetails, features, logger);
|
||||||
injectedMlQueries = injectedMlQueries || didInject;
|
injectedMlQueries = injectedMlQueries || didInject;
|
||||||
}
|
}
|
||||||
return injectedMlQueries;
|
return injectedMlQueries;
|
||||||
@@ -515,7 +527,7 @@ function shouldAddConfigFileQueries(queriesInput) {
|
|||||||
/**
|
/**
|
||||||
* Get the default config for when the user has not supplied one.
|
* Get the default config for when the user has not supplied one.
|
||||||
*/
|
*/
|
||||||
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||||
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
|
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
|
||||||
const queries = {};
|
const queries = {};
|
||||||
for (const language of languages) {
|
for (const language of languages) {
|
||||||
@@ -533,7 +545,7 @@ async function getDefaultConfig(languagesInput, rawQueriesInput, rawPacksInput,
|
|||||||
: {};
|
: {};
|
||||||
if (rawQueriesInput) {
|
if (rawQueriesInput) {
|
||||||
augmentationProperties.injectedMlQueries =
|
augmentationProperties.injectedMlQueries =
|
||||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, features, logger);
|
||||||
}
|
}
|
||||||
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
const { trapCaches, trapCacheDownloadTime } = await downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logger);
|
||||||
return {
|
return {
|
||||||
@@ -569,7 +581,7 @@ async function downloadCacheWithTime(trapCachingEnabled, codeQL, languages, logg
|
|||||||
/**
|
/**
|
||||||
* Load the config from the given file.
|
* Load the config from the given file.
|
||||||
*/
|
*/
|
||||||
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||||
let parsedYAML;
|
let parsedYAML;
|
||||||
if (isLocal(configFile)) {
|
if (isLocal(configFile)) {
|
||||||
// Treat the config file as relative to the workspace
|
// Treat the config file as relative to the workspace
|
||||||
@@ -617,7 +629,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
|||||||
// in the config file.
|
// in the config file.
|
||||||
if (rawQueriesInput) {
|
if (rawQueriesInput) {
|
||||||
augmentationProperties.injectedMlQueries =
|
augmentationProperties.injectedMlQueries =
|
||||||
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, featureEnablement, logger);
|
await addQueriesAndPacksFromWorkflow(codeQL, rawQueriesInput, languages, queries, packs, tempDir, workspacePath, apiDetails, features, logger);
|
||||||
}
|
}
|
||||||
if (shouldAddConfigFileQueries(rawQueriesInput) &&
|
if (shouldAddConfigFileQueries(rawQueriesInput) &&
|
||||||
QUERIES_PROPERTY in parsedYAML) {
|
QUERIES_PROPERTY in parsedYAML) {
|
||||||
@@ -629,7 +641,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
|||||||
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||||
throw new Error(getQueriesMissingUses(configFile));
|
throw new Error(getQueriesMissingUses(configFile));
|
||||||
}
|
}
|
||||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, featureEnablement, logger, configFile);
|
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, features, logger, configFile);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||||
@@ -713,7 +725,7 @@ function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
|
|||||||
}
|
}
|
||||||
const trimmedInput = queriesInputCombines
|
const trimmedInput = queriesInputCombines
|
||||||
? rawQueriesInput.trim().slice(1).trim()
|
? rawQueriesInput.trim().slice(1).trim()
|
||||||
: rawQueriesInput?.trim();
|
: rawQueriesInput?.trim() ?? "";
|
||||||
if (queriesInputCombines && trimmedInput.length === 0) {
|
if (queriesInputCombines && trimmedInput.length === 0) {
|
||||||
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||||
}
|
}
|
||||||
@@ -893,7 +905,8 @@ exports.parsePacks = parsePacks;
|
|||||||
* Without a '+', an input value will override the corresponding value in the config file.
|
* Without a '+', an input value will override the corresponding value in the config file.
|
||||||
*
|
*
|
||||||
* @param inputValue The input value to process.
|
* @param inputValue The input value to process.
|
||||||
* @returns true if the input value should replace the corresponding value in the config file, false if it should be appended.
|
* @returns true if the input value should replace the corresponding value in the config file,
|
||||||
|
* false if it should be appended.
|
||||||
*/
|
*/
|
||||||
function shouldCombine(inputValue) {
|
function shouldCombine(inputValue) {
|
||||||
return !!inputValue?.trim().startsWith("+");
|
return !!inputValue?.trim().startsWith("+");
|
||||||
@@ -919,21 +932,21 @@ function dbLocationOrDefault(dbLocation, tempDir) {
|
|||||||
* This will parse the config from the user input if present, or generate
|
* This will parse the config from the user input if present, or generate
|
||||||
* a default config. The parsed config is then stored to a known location.
|
* a default config. The parsed config is then stored to a known location.
|
||||||
*/
|
*/
|
||||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||||
let config;
|
let config;
|
||||||
// If no config file was provided create an empty one
|
// If no config file was provided create an empty one
|
||||||
if (!configFile) {
|
if (!configFile) {
|
||||||
logger.debug("No configuration file was provided");
|
logger.debug("No configuration file was provided");
|
||||||
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
config = await getDefaultConfig(languagesInput, queriesInput, packsInput, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
config = await loadConfig(languagesInput, queriesInput, packsInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
|
||||||
}
|
}
|
||||||
// When using the codescanning config in the CLI, pack downloads
|
// When using the codescanning config in the CLI, pack downloads
|
||||||
// happen in the CLI during the `database init` command, so no need
|
// happen in the CLI during the `database init` command, so no need
|
||||||
// to download them here.
|
// to download them here.
|
||||||
await (0, util_1.logCodeScanningConfigInCli)(codeQL, featureEnablement, logger);
|
await (0, util_1.logCodeScanningConfigInCli)(codeQL, features, logger);
|
||||||
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, featureEnablement))) {
|
if (!(await (0, util_1.useCodeScanningConfigInCli)(codeQL, features))) {
|
||||||
// The list of queries should not be empty for any language. If it is then
|
// The list of queries should not be empty for any language. If it is then
|
||||||
// it is a user configuration error.
|
// it is a user configuration error.
|
||||||
// This check occurs in the CLI when it parses the config file.
|
// This check occurs in the CLI when it parses the config file.
|
||||||
@@ -946,8 +959,7 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
|||||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const registries = parseRegistries(registriesInput);
|
await downloadPacks(codeQL, config.languages, config.packs, apiDetails, registriesInput, config.tempDir, logger);
|
||||||
await downloadPacks(codeQL, config.languages, config.packs, registries, apiDetails, config.tempDir, logger);
|
|
||||||
}
|
}
|
||||||
// Save the config so we can easily access it again in the future
|
// Save the config so we can easily access it again in the future
|
||||||
await saveConfig(config, logger);
|
await saveConfig(config, logger);
|
||||||
@@ -1043,21 +1055,9 @@ async function getConfig(tempDir, logger) {
|
|||||||
return JSON.parse(configString);
|
return JSON.parse(configString);
|
||||||
}
|
}
|
||||||
exports.getConfig = getConfig;
|
exports.getConfig = getConfig;
|
||||||
async function downloadPacks(codeQL, languages, packs, registries, apiDetails, tmpDir, logger) {
|
async function downloadPacks(codeQL, languages, packs, apiDetails, registriesInput, tempDir, logger) {
|
||||||
let qlconfigFile;
|
// This code path is only used when config parsing occurs in the Action.
|
||||||
let registriesAuthTokens;
|
const { registriesAuthTokens, qlconfigFile } = await generateRegistries(registriesInput, codeQL, tempDir, logger);
|
||||||
if (registries) {
|
|
||||||
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
|
|
||||||
throw new Error(`'registries' input is not supported on CodeQL versions less than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}.`);
|
|
||||||
}
|
|
||||||
// generate a qlconfig.yml file to hold the registry configs.
|
|
||||||
const qlconfig = createRegistriesBlock(registries);
|
|
||||||
qlconfigFile = path.join(tmpDir, "qlconfig.yml");
|
|
||||||
fs.writeFileSync(qlconfigFile, yaml.dump(qlconfig), "utf8");
|
|
||||||
registriesAuthTokens = registries
|
|
||||||
.map((registry) => `${registry.url}=${registry.token}`)
|
|
||||||
.join(",");
|
|
||||||
}
|
|
||||||
await wrapEnvironment({
|
await wrapEnvironment({
|
||||||
GITHUB_TOKEN: apiDetails.auth,
|
GITHUB_TOKEN: apiDetails.auth,
|
||||||
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
||||||
@@ -1085,6 +1085,48 @@ async function downloadPacks(codeQL, languages, packs, registries, apiDetails, t
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.downloadPacks = downloadPacks;
|
exports.downloadPacks = downloadPacks;
|
||||||
|
/**
|
||||||
|
* Generate a `qlconfig.yml` file from the `registries` input.
|
||||||
|
* This file is used by the CodeQL CLI to list the registries to use for each
|
||||||
|
* pack.
|
||||||
|
*
|
||||||
|
* @param registriesInput The value of the `registries` input.
|
||||||
|
* @param codeQL a codeQL object, used only for checking the version of CodeQL.
|
||||||
|
* @param tempDir a temporary directory to store the generated qlconfig.yml file.
|
||||||
|
* @param logger a logger object.
|
||||||
|
* @returns The path to the generated `qlconfig.yml` file and the auth tokens to
|
||||||
|
* use for each registry.
|
||||||
|
*/
|
||||||
|
async function generateRegistries(registriesInput, codeQL, tempDir, logger) {
|
||||||
|
const registries = parseRegistries(registriesInput);
|
||||||
|
let registriesAuthTokens;
|
||||||
|
let qlconfigFile;
|
||||||
|
if (registries) {
|
||||||
|
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
|
||||||
|
throw new Error(`The 'registries' input is not supported on CodeQL CLI versions earlier than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}. Please upgrade to CodeQL CLI version ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD} or later.`);
|
||||||
|
}
|
||||||
|
// generate a qlconfig.yml file to hold the registry configs.
|
||||||
|
const qlconfig = createRegistriesBlock(registries);
|
||||||
|
qlconfigFile = path.join(tempDir, "qlconfig.yml");
|
||||||
|
const qlconfigContents = yaml.dump(qlconfig);
|
||||||
|
fs.writeFileSync(qlconfigFile, qlconfigContents, "utf8");
|
||||||
|
logger.debug("Generated qlconfig.yml:");
|
||||||
|
logger.debug(qlconfigContents);
|
||||||
|
registriesAuthTokens = registries
|
||||||
|
.map((registry) => `${registry.url}=${registry.token}`)
|
||||||
|
.join(",");
|
||||||
|
}
|
||||||
|
if (typeof process.env.CODEQL_REGISTRIES_AUTH === "string") {
|
||||||
|
logger.debug("Using CODEQL_REGISTRIES_AUTH environment variable to authenticate with registries.");
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
registriesAuthTokens:
|
||||||
|
// if the user has explicitly set the CODEQL_REGISTRIES_AUTH env var then use that
|
||||||
|
process.env.CODEQL_REGISTRIES_AUTH ?? registriesAuthTokens,
|
||||||
|
qlconfigFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
exports.generateRegistries = generateRegistries;
|
||||||
function createRegistriesBlock(registries) {
|
function createRegistriesBlock(registries) {
|
||||||
if (!Array.isArray(registries) ||
|
if (!Array.isArray(registries) ||
|
||||||
registries.some((r) => !r.url || !r.packages)) {
|
registries.some((r) => !r.url || !r.packages)) {
|
||||||
@@ -1134,4 +1176,14 @@ async function wrapEnvironment(env, operation) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
exports.wrapEnvironment = wrapEnvironment;
|
||||||
|
/**
|
||||||
|
* Get the path to the code scanning configuration generated by the CLI.
|
||||||
|
*
|
||||||
|
* This will not exist if the configuration is being parsed in the Action.
|
||||||
|
*/
|
||||||
|
function getGeneratedCodeScanningConfigPath(config) {
|
||||||
|
return path.resolve(config.tempDir, "user-config.yaml");
|
||||||
|
}
|
||||||
|
exports.getGeneratedCodeScanningConfigPath = getGeneratedCodeScanningConfigPath;
|
||||||
//# sourceMappingURL=config-utils.js.map
|
//# sourceMappingURL=config-utils.js.map
|
||||||
File diff suppressed because one or more lines are too long
85
lib/config-utils.test.js
generated
85
lib/config-utils.test.js
generated
@@ -1014,7 +1014,7 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
|||||||
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
|
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
|
||||||
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
|
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
|
||||||
// `security-extended` or `security-and-quality` query suite.
|
// `security-extended`, `security-and-quality`, or `security-experimental` query suite.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
|
||||||
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
|
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
|
||||||
// versions of the CodeQL CLI prior to 2.9.0.
|
// versions of the CodeQL CLI prior to 2.9.0.
|
||||||
@@ -1042,6 +1042,9 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
|
|||||||
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
|
||||||
// CLI 2.11.3+.
|
// CLI 2.11.3+.
|
||||||
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-and-quality", "~0.4.0");
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-and-quality", "~0.4.0");
|
||||||
|
// Test that ML-powered queries are run on all platforms running `security-experimental` on CodeQL
|
||||||
|
// CLI 2.12.1+.
|
||||||
|
(0, ava_1.default)(mlPoweredQueriesMacro, "2.12.1", true, undefined, "security-experimental", "~0.4.0");
|
||||||
const calculateAugmentationMacro = ava_1.default.macro({
|
const calculateAugmentationMacro = ava_1.default.macro({
|
||||||
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
|
exec: async (t, _title, rawPacksInput, rawQueriesInput, languages, expectedAugmentationProperties) => {
|
||||||
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
const actualAugmentationProperties = configUtils.calculateAugmentation(rawPacksInput, rawQueriesInput, languages);
|
||||||
@@ -1111,8 +1114,8 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
java: ["a", "b"],
|
java: ["a", "b"],
|
||||||
go: ["c", "d"],
|
go: ["c", "d"],
|
||||||
python: ["e", "f"],
|
python: ["e", "f"],
|
||||||
}, undefined, // registries
|
}, sampleApiDetails, undefined, // registriesAuthTokens
|
||||||
sampleApiDetails, tmpDir, logger);
|
tmpDir, logger);
|
||||||
// Expecting packs to be downloaded once for java and once for python
|
// Expecting packs to be downloaded once for java and once for python
|
||||||
t.deepEqual(packDownloadStub.callCount, 2);
|
t.deepEqual(packDownloadStub.callCount, 2);
|
||||||
// no config file was created, so pass `undefined` as the config file path
|
// no config file was created, so pass `undefined` as the config file path
|
||||||
@@ -1125,9 +1128,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
// associated env vars
|
// associated env vars
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
process.env.GITHUB_TOKEN = "not-a-token";
|
process.env.GITHUB_TOKEN = "not-a-token";
|
||||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
process.env.CODEQL_REGISTRIES_AUTH = undefined;
|
||||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
const registries = [
|
const registriesInput = yaml.dump([
|
||||||
{
|
{
|
||||||
// no slash
|
// no slash
|
||||||
url: "http://ghcr.io",
|
url: "http://ghcr.io",
|
||||||
@@ -1140,8 +1143,9 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
packages: "semmle/*",
|
packages: "semmle/*",
|
||||||
token: "still-not-a-token",
|
token: "still-not-a-token",
|
||||||
},
|
},
|
||||||
];
|
]);
|
||||||
// append a slash to the first url
|
// append a slash to the first url
|
||||||
|
const registries = yaml.load(registriesInput);
|
||||||
const expectedRegistries = registries.map((r, i) => ({
|
const expectedRegistries = registries.map((r, i) => ({
|
||||||
packages: r.packages,
|
packages: r.packages,
|
||||||
url: i === 0 ? `${r.url}/` : r.url,
|
url: i === 0 ? `${r.url}/` : r.url,
|
||||||
@@ -1170,7 +1174,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
java: ["a", "b"],
|
java: ["a", "b"],
|
||||||
go: ["c", "d"],
|
go: ["c", "d"],
|
||||||
python: ["e", "f"],
|
python: ["e", "f"],
|
||||||
}, registries, sampleApiDetails, tmpDir, logger);
|
}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||||
// Same packs are downloaded as in previous test
|
// Same packs are downloaded as in previous test
|
||||||
t.deepEqual(packDownloadStub.callCount, 2);
|
t.deepEqual(packDownloadStub.callCount, 2);
|
||||||
t.deepEqual(packDownloadStub.firstCall.args, [
|
t.deepEqual(packDownloadStub.firstCall.args, [
|
||||||
@@ -1183,7 +1187,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
]);
|
]);
|
||||||
// Verify that the env vars were unset.
|
// Verify that the env vars were unset.
|
||||||
t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
|
t.deepEqual(process.env.GITHUB_TOKEN, "not-a-token");
|
||||||
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, "not-a-registries-auth");
|
t.deepEqual(process.env.CODEQL_REGISTRIES_AUTH, undefined);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
|
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
|
||||||
@@ -1193,7 +1197,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
process.env.GITHUB_TOKEN = "not-a-token";
|
process.env.GITHUB_TOKEN = "not-a-token";
|
||||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
||||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
const registries = [
|
const registriesInput = yaml.dump([
|
||||||
{
|
{
|
||||||
url: "http://ghcr.io",
|
url: "http://ghcr.io",
|
||||||
packages: ["codeql/*", "dsp-testing/*"],
|
packages: ["codeql/*", "dsp-testing/*"],
|
||||||
@@ -1204,12 +1208,12 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
packages: "semmle/*",
|
packages: "semmle/*",
|
||||||
token: "still-not-a-token",
|
token: "still-not-a-token",
|
||||||
},
|
},
|
||||||
];
|
]);
|
||||||
const codeQL = (0, codeql_1.setCodeQL)({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
getVersion: () => Promise.resolve("2.10.3"),
|
getVersion: () => Promise.resolve("2.10.3"),
|
||||||
});
|
});
|
||||||
await t.throwsAsync(async () => {
|
await t.throwsAsync(async () => {
|
||||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
|
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||||
}, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
|
}, { instanceOf: Error }, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -1220,7 +1224,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
process.env.GITHUB_TOKEN = "not-a-token";
|
process.env.GITHUB_TOKEN = "not-a-token";
|
||||||
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
|
||||||
const logger = (0, logging_1.getRunnerLogger)(true);
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
const registries = [
|
const registriesInput = yaml.dump([
|
||||||
{
|
{
|
||||||
// missing url property
|
// missing url property
|
||||||
packages: ["codeql/*", "dsp-testing/*"],
|
packages: ["codeql/*", "dsp-testing/*"],
|
||||||
@@ -1231,15 +1235,68 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
|
|||||||
packages: "semmle/*",
|
packages: "semmle/*",
|
||||||
token: "still-not-a-token",
|
token: "still-not-a-token",
|
||||||
},
|
},
|
||||||
];
|
]);
|
||||||
const codeQL = (0, codeql_1.setCodeQL)({
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
getVersion: () => Promise.resolve("2.10.4"),
|
getVersion: () => Promise.resolve("2.10.4"),
|
||||||
});
|
});
|
||||||
await t.throwsAsync(async () => {
|
await t.throwsAsync(async () => {
|
||||||
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, registries, sampleApiDetails, tmpDir, logger);
|
return await configUtils.downloadPacks(codeQL, [languages_1.Language.javascript, languages_1.Language.java, languages_1.Language.python], {}, sampleApiDetails, registriesInput, tmpDir, logger);
|
||||||
}, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
}, { instanceOf: Error }, "Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
// the happy path for generateRegistries is already tested in downloadPacks.
|
||||||
|
// these following tests are for the error cases and when nothing is generated.
|
||||||
|
(0, ava_1.default)("no generateRegistries when CLI is too old", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const registriesInput = yaml.dump([
|
||||||
|
{
|
||||||
|
// no slash
|
||||||
|
url: "http://ghcr.io",
|
||||||
|
packages: ["codeql/*", "dsp-testing/*"],
|
||||||
|
token: "not-a-token",
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
|
// Accepted CLI versions are 2.10.4 or higher
|
||||||
|
getVersion: () => Promise.resolve("2.10.3"),
|
||||||
|
});
|
||||||
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
|
await t.throwsAsync(async () => await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger), undefined, "'registries' input is not supported on CodeQL versions less than 2.10.4.");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("no generateRegistries when registries is undefined", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
const registriesInput = undefined;
|
||||||
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
|
// Accepted CLI versions are 2.10.4 or higher
|
||||||
|
getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
|
||||||
|
});
|
||||||
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
|
const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
|
||||||
|
t.is(registriesAuthTokens, undefined);
|
||||||
|
t.is(qlconfigFile, undefined);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("generateRegistries prefers original CODEQL_REGISTRIES_AUTH", async (t) => {
|
||||||
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env.CODEQL_REGISTRIES_AUTH = "original";
|
||||||
|
const registriesInput = yaml.dump([
|
||||||
|
{
|
||||||
|
url: "http://ghcr.io",
|
||||||
|
packages: ["codeql/*", "dsp-testing/*"],
|
||||||
|
token: "not-a-token",
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
const codeQL = (0, codeql_1.setCodeQL)({
|
||||||
|
// Accepted CLI versions are 2.10.4 or higher
|
||||||
|
getVersion: () => Promise.resolve(codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD),
|
||||||
|
});
|
||||||
|
const logger = (0, logging_1.getRunnerLogger)(true);
|
||||||
|
const { registriesAuthTokens, qlconfigFile } = await configUtils.generateRegistries(registriesInput, codeQL, tmpDir, logger);
|
||||||
|
t.is(registriesAuthTokens, "original");
|
||||||
|
t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml"));
|
||||||
|
});
|
||||||
|
});
|
||||||
// getLanguages
|
// getLanguages
|
||||||
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
||||||
// eslint-disable-next-line github/array-foreach
|
// eslint-disable-next-line github/array-foreach
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20230120",
|
"bundleVersion": "codeql-bundle-20230304",
|
||||||
"cliVersion": "2.12.1",
|
"cliVersion": "2.12.4",
|
||||||
"priorBundleVersion": "codeql-bundle-20230105",
|
"priorBundleVersion": "codeql-bundle-20230217",
|
||||||
"priorCliVersion": "2.12.0"
|
"priorCliVersion": "2.12.3"
|
||||||
}
|
}
|
||||||
|
|||||||
79
lib/feature-flags.js
generated
79
lib/feature-flags.js
generated
@@ -34,46 +34,37 @@ const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
|
|||||||
const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
|
const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
|
||||||
var Feature;
|
var Feature;
|
||||||
(function (Feature) {
|
(function (Feature) {
|
||||||
Feature["BypassToolcacheEnabled"] = "bypass_toolcache_enabled";
|
|
||||||
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
|
|
||||||
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||||
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
||||||
|
Feature["ExportCodeScanningConfigEnabled"] = "export_code_scanning_config_enabled";
|
||||||
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||||
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
|
||||||
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
|
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
|
||||||
})(Feature = exports.Feature || (exports.Feature = {}));
|
})(Feature = exports.Feature || (exports.Feature = {}));
|
||||||
exports.featureConfig = {
|
exports.featureConfig = {
|
||||||
[Feature.BypassToolcacheEnabled]: {
|
|
||||||
envVar: "CODEQL_BYPASS_TOOLCACHE",
|
|
||||||
// Cannot specify a minimum version because this flag is checked before we have
|
|
||||||
// access to the CodeQL instance.
|
|
||||||
minimumVersion: undefined,
|
|
||||||
},
|
|
||||||
[Feature.BypassToolcacheKotlinSwiftEnabled]: {
|
|
||||||
envVar: "CODEQL_BYPASS_TOOLCACHE_KOTLIN_SWIFT",
|
|
||||||
// Cannot specify a minimum version because this flag is checked before we have
|
|
||||||
// access to the CodeQL instance.
|
|
||||||
minimumVersion: undefined,
|
|
||||||
},
|
|
||||||
[Feature.DisableKotlinAnalysisEnabled]: {
|
[Feature.DisableKotlinAnalysisEnabled]: {
|
||||||
envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS",
|
envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS",
|
||||||
minimumVersion: undefined,
|
minimumVersion: undefined,
|
||||||
|
defaultValue: false,
|
||||||
},
|
},
|
||||||
[Feature.CliConfigFileEnabled]: {
|
[Feature.CliConfigFileEnabled]: {
|
||||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||||
minimumVersion: "2.11.6",
|
minimumVersion: "2.11.6",
|
||||||
|
defaultValue: true,
|
||||||
|
},
|
||||||
|
[Feature.ExportCodeScanningConfigEnabled]: {
|
||||||
|
envVar: "CODEQL_ACTION_EXPORT_CODE_SCANNING_CONFIG",
|
||||||
|
minimumVersion: "2.12.3",
|
||||||
|
defaultValue: false,
|
||||||
},
|
},
|
||||||
[Feature.MlPoweredQueriesEnabled]: {
|
[Feature.MlPoweredQueriesEnabled]: {
|
||||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||||
minimumVersion: "2.7.5",
|
minimumVersion: "2.7.5",
|
||||||
},
|
defaultValue: false,
|
||||||
[Feature.TrapCachingEnabled]: {
|
|
||||||
envVar: "CODEQL_TRAP_CACHING",
|
|
||||||
minimumVersion: undefined,
|
|
||||||
},
|
},
|
||||||
[Feature.UploadFailedSarifEnabled]: {
|
[Feature.UploadFailedSarifEnabled]: {
|
||||||
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||||
minimumVersion: "2.11.3",
|
minimumVersion: "2.11.3",
|
||||||
|
defaultValue: false,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
||||||
@@ -84,6 +75,7 @@ exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
|||||||
*/
|
*/
|
||||||
class Features {
|
class Features {
|
||||||
constructor(gitHubVersion, repositoryNwo, tempDir, logger) {
|
constructor(gitHubVersion, repositoryNwo, tempDir, logger) {
|
||||||
|
this.logger = logger;
|
||||||
this.gitHubFeatureFlags = new GitHubFeatureFlags(gitHubVersion, repositoryNwo, path.join(tempDir, exports.FEATURE_FLAGS_FILE_NAME), logger);
|
this.gitHubFeatureFlags = new GitHubFeatureFlags(gitHubVersion, repositoryNwo, path.join(tempDir, exports.FEATURE_FLAGS_FILE_NAME), logger);
|
||||||
}
|
}
|
||||||
async getDefaultCliVersion(variant) {
|
async getDefaultCliVersion(variant) {
|
||||||
@@ -105,28 +97,39 @@ class Features {
|
|||||||
if (!codeql && exports.featureConfig[feature].minimumVersion) {
|
if (!codeql && exports.featureConfig[feature].minimumVersion) {
|
||||||
throw new Error(`Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`);
|
throw new Error(`Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`);
|
||||||
}
|
}
|
||||||
// Bypassing the toolcache is disabled in test mode.
|
|
||||||
if (feature === Feature.BypassToolcacheEnabled && util.isInTestMode()) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const envVar = (process.env[exports.featureConfig[feature].envVar] || "").toLocaleLowerCase();
|
const envVar = (process.env[exports.featureConfig[feature].envVar] || "").toLocaleLowerCase();
|
||||||
// Do not use this feature if user explicitly disables it via an environment variable.
|
// Do not use this feature if user explicitly disables it via an environment variable.
|
||||||
if (envVar === "false") {
|
if (envVar === "false") {
|
||||||
|
this.logger.debug(`Feature ${feature} is disabled via the environment variable ${exports.featureConfig[feature].envVar}.`);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
// Never use this feature if the CLI version explicitly can't support it.
|
// Never use this feature if the CLI version explicitly can't support it.
|
||||||
const minimumVersion = exports.featureConfig[feature].minimumVersion;
|
const minimumVersion = exports.featureConfig[feature].minimumVersion;
|
||||||
if (codeql && minimumVersion) {
|
if (codeql && minimumVersion) {
|
||||||
if (!(await util.codeQlVersionAbove(codeql, minimumVersion))) {
|
if (!(await util.codeQlVersionAbove(codeql, minimumVersion))) {
|
||||||
|
this.logger.debug(`Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum ` +
|
||||||
|
`version ${minimumVersion}.`);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
else {
|
||||||
|
this.logger.debug(`CodeQL CLI version ${await codeql.getVersion()} is newer than the minimum ` +
|
||||||
|
`version ${minimumVersion} for feature ${feature}.`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
// Use this feature if user explicitly enables it via an environment variable.
|
// Use this feature if user explicitly enables it via an environment variable.
|
||||||
if (envVar === "true") {
|
if (envVar === "true") {
|
||||||
|
this.logger.debug(`Feature ${feature} is enabled via the environment variable ${exports.featureConfig[feature].envVar}.`);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
// Ask the GitHub API if the feature is enabled.
|
// Ask the GitHub API if the feature is enabled.
|
||||||
return await this.gitHubFeatureFlags.getValue(feature);
|
const apiValue = await this.gitHubFeatureFlags.getValue(feature);
|
||||||
|
if (apiValue !== undefined) {
|
||||||
|
this.logger.debug(`Feature ${feature} is ${apiValue ? "enabled" : "disabled"} via the GitHub API.`);
|
||||||
|
return apiValue;
|
||||||
|
}
|
||||||
|
const defaultValue = exports.featureConfig[feature].defaultValue;
|
||||||
|
this.logger.debug(`Feature ${feature} is ${defaultValue ? "enabled" : "disabled"} due to its default value.`);
|
||||||
|
return defaultValue;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.Features = Features;
|
exports.Features = Features;
|
||||||
@@ -136,7 +139,7 @@ class GitHubFeatureFlags {
|
|||||||
this.repositoryNwo = repositoryNwo;
|
this.repositoryNwo = repositoryNwo;
|
||||||
this.featureFlagsFile = featureFlagsFile;
|
this.featureFlagsFile = featureFlagsFile;
|
||||||
this.logger = logger;
|
this.logger = logger;
|
||||||
/**/
|
this.hasAccessedRemoteFeatureFlags = false; // Not accessed by default.
|
||||||
}
|
}
|
||||||
getCliVersionFromFeatureFlag(f) {
|
getCliVersionFromFeatureFlag(f) {
|
||||||
if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) ||
|
if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) ||
|
||||||
@@ -157,7 +160,9 @@ class GitHubFeatureFlags {
|
|||||||
const defaultDotComCliVersion = await this.getDefaultDotcomCliVersion();
|
const defaultDotComCliVersion = await this.getDefaultDotcomCliVersion();
|
||||||
return {
|
return {
|
||||||
cliVersion: defaultDotComCliVersion.version,
|
cliVersion: defaultDotComCliVersion.version,
|
||||||
toolsFeatureFlagsValid: defaultDotComCliVersion.toolsFeatureFlagsValid,
|
toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
|
||||||
|
? defaultDotComCliVersion.toolsFeatureFlagsValid
|
||||||
|
: undefined,
|
||||||
variant,
|
variant,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -188,7 +193,9 @@ class GitHubFeatureFlags {
|
|||||||
`shipped with the Action. This is ${defaults.cliVersion}.`);
|
`shipped with the Action. This is ${defaults.cliVersion}.`);
|
||||||
return {
|
return {
|
||||||
version: defaults.cliVersion,
|
version: defaults.cliVersion,
|
||||||
toolsFeatureFlagsValid: false,
|
toolsFeatureFlagsValid: this.hasAccessedRemoteFeatureFlags
|
||||||
|
? false
|
||||||
|
: undefined,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
const maxCliVersion = enabledFeatureFlagCliVersions.reduce((maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, enabledFeatureFlagCliVersions[0]);
|
const maxCliVersion = enabledFeatureFlagCliVersions.reduce((maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, enabledFeatureFlagCliVersions[0]);
|
||||||
@@ -198,15 +205,15 @@ class GitHubFeatureFlags {
|
|||||||
async getValue(feature) {
|
async getValue(feature) {
|
||||||
const response = await this.getAllFeatures();
|
const response = await this.getAllFeatures();
|
||||||
if (response === undefined) {
|
if (response === undefined) {
|
||||||
this.logger.debug(`No feature flags API response for ${feature}, considering it disabled.`);
|
this.logger.debug(`No feature flags API response for ${feature}.`);
|
||||||
return false;
|
return undefined;
|
||||||
}
|
}
|
||||||
const featureEnablement = response[feature];
|
const features = response[feature];
|
||||||
if (featureEnablement === undefined) {
|
if (features === undefined) {
|
||||||
this.logger.debug(`Feature '${feature}' undefined in API response, considering it disabled.`);
|
this.logger.debug(`Feature '${feature}' undefined in API response.`);
|
||||||
return false;
|
return undefined;
|
||||||
}
|
}
|
||||||
return !!featureEnablement;
|
return !!features;
|
||||||
}
|
}
|
||||||
async getAllFeatures() {
|
async getAllFeatures() {
|
||||||
// if we have an in memory cache, use that
|
// if we have an in memory cache, use that
|
||||||
@@ -255,6 +262,7 @@ class GitHubFeatureFlags {
|
|||||||
// Do nothing when not running against github.com
|
// Do nothing when not running against github.com
|
||||||
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
|
if (this.gitHubVersion.type !== util.GitHubVariant.DOTCOM) {
|
||||||
this.logger.debug("Not running against github.com. Disabling all toggleable features.");
|
this.logger.debug("Not running against github.com. Disabling all toggleable features.");
|
||||||
|
this.hasAccessedRemoteFeatureFlags = false;
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
@@ -265,6 +273,7 @@ class GitHubFeatureFlags {
|
|||||||
const remoteFlags = response.data;
|
const remoteFlags = response.data;
|
||||||
this.logger.debug("Loaded the following default values for the feature flags from the Code Scanning API: " +
|
this.logger.debug("Loaded the following default values for the feature flags from the Code Scanning API: " +
|
||||||
`${JSON.stringify(remoteFlags)}`);
|
`${JSON.stringify(remoteFlags)}`);
|
||||||
|
this.hasAccessedRemoteFeatureFlags = true;
|
||||||
return remoteFlags;
|
return remoteFlags;
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
@@ -273,6 +282,7 @@ class GitHubFeatureFlags {
|
|||||||
"As a result, it will not be opted into any experimental features. " +
|
"As a result, it will not be opted into any experimental features. " +
|
||||||
"This could be because the Action is running on a pull request from a fork. If not, " +
|
"This could be because the Action is running on a pull request from a fork. If not, " +
|
||||||
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
|
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
|
||||||
|
this.hasAccessedRemoteFeatureFlags = false;
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -283,7 +293,6 @@ class GitHubFeatureFlags {
|
|||||||
throw new Error(`Encountered an error while trying to determine feature enablement: ${e}`);
|
throw new Error(`Encountered an error while trying to determine feature enablement: ${e}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return {};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
//# sourceMappingURL=feature-flags.js.map
|
//# sourceMappingURL=feature-flags.js.map
|
||||||
File diff suppressed because one or more lines are too long
80
lib/feature-flags.test.js
generated
80
lib/feature-flags.test.js
generated
@@ -51,9 +51,9 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
|||||||
(0, ava_1.default)(`All features are disabled if running against ${variant.description}`, async (t) => {
|
(0, ava_1.default)(`All features are disabled if running against ${variant.description}`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
|
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages), variant.gitHubVersion);
|
||||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||||
t.false(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
t.deepEqual(await features.getValue(feature, includeCodeQlIfRequired(feature)), feature_flags_1.featureConfig[feature].defaultValue);
|
||||||
}
|
}
|
||||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||||
v.message ===
|
v.message ===
|
||||||
@@ -61,33 +61,35 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
(0, ava_1.default)("API response missing", async (t) => {
|
(0, ava_1.default)("API response missing and features use default value", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(403, {});
|
||||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||||
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
|
t.assert((await features.getValue(feature, includeCodeQlIfRequired(feature))) ===
|
||||||
|
feature_flags_1.featureConfig[feature].defaultValue);
|
||||||
}
|
}
|
||||||
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("Features are disabled if they're not returned in API response", async (t) => {
|
(0, ava_1.default)("Features use default value if they're not returned in API response", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, {});
|
||||||
for (const feature of Object.values(feature_flags_1.Feature)) {
|
for (const feature of Object.values(feature_flags_1.Feature)) {
|
||||||
t.assert((await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))) === false);
|
t.assert((await features.getValue(feature, includeCodeQlIfRequired(feature))) ===
|
||||||
|
feature_flags_1.featureConfig[feature].defaultValue);
|
||||||
}
|
}
|
||||||
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
assertAllFeaturesUndefinedInApi(t, loggedMessages);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
|
(0, ava_1.default)("Feature flags exception is propagated if the API request errors", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(500, {});
|
||||||
await t.throwsAsync(async () => featureEnablement.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.MlPoweredQueriesEnabled)), {
|
await t.throwsAsync(async () => features.getValue(feature_flags_1.Feature.MlPoweredQueriesEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.MlPoweredQueriesEnabled)), {
|
||||||
message: "Encountered an error while trying to determine feature enablement: Error: some error message",
|
message: "Encountered an error while trying to determine feature enablement: Error: some error message",
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -95,7 +97,7 @@ for (const variant of ALL_FEATURES_DISABLED_VARIANTS) {
|
|||||||
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||||
(0, ava_1.default)(`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`, async (t) => {
|
(0, ava_1.default)(`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
// set all features to false except the one we're testing
|
// set all features to false except the one we're testing
|
||||||
const expectedFeatureEnablement = {};
|
const expectedFeatureEnablement = {};
|
||||||
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
||||||
@@ -105,7 +107,7 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
|||||||
// retrieve the values of the actual features
|
// retrieve the values of the actual features
|
||||||
const actualFeatureEnablement = {};
|
const actualFeatureEnablement = {};
|
||||||
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
for (const f of Object.keys(feature_flags_1.featureConfig)) {
|
||||||
actualFeatureEnablement[f] = await featureEnablement.getValue(f, includeCodeQlIfRequired(f));
|
actualFeatureEnablement[f] = await features.getValue(f, includeCodeQlIfRequired(f));
|
||||||
}
|
}
|
||||||
// All features should be false except the one we're testing
|
// All features should be false except the one we're testing
|
||||||
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
||||||
@@ -113,35 +115,35 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
|||||||
});
|
});
|
||||||
(0, ava_1.default)(`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, async (t) => {
|
(0, ava_1.default)(`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(false);
|
const expectedFeatureEnablement = initializeFeatures(false);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
// feature should be disabled initially
|
// feature should be disabled initially
|
||||||
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
|
t.assert(!(await features.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||||
// set env var to true and check that the feature is now enabled
|
// set env var to true and check that the feature is now enabled
|
||||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
||||||
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
t.assert(await features.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => {
|
(0, ava_1.default)(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
// feature should be enabled initially
|
// feature should be enabled initially
|
||||||
t.assert(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature)));
|
t.assert(await features.getValue(feature, includeCodeQlIfRequired(feature)));
|
||||||
// set env var to false and check that the feature is now disabled
|
// set env var to false and check that the feature is now disabled
|
||||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
||||||
t.assert(!(await featureEnablement.getValue(feature, includeCodeQlIfRequired(feature))));
|
t.assert(!(await features.getValue(feature, includeCodeQlIfRequired(feature))));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
||||||
(0, ava_1.default)(`Getting feature '${feature} should throw if no codeql is provided`, async (t) => {
|
(0, ava_1.default)(`Getting feature '${feature} should throw if no codeql is provided`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
await t.throwsAsync(async () => featureEnablement.getValue(feature), {
|
await t.throwsAsync(async () => features.getValue(feature), {
|
||||||
message: `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`,
|
message: `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.`,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -150,24 +152,24 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
|||||||
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
if (feature_flags_1.featureConfig[feature].minimumVersion !== undefined) {
|
||||||
(0, ava_1.default)(`Feature '${feature}' is disabled if the minimum CLI version is below ${feature_flags_1.featureConfig[feature].minimumVersion}`, async (t) => {
|
(0, ava_1.default)(`Feature '${feature}' is disabled if the minimum CLI version is below ${feature_flags_1.featureConfig[feature].minimumVersion}`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
// feature should be disabled when an old CLI version is set
|
// feature should be disabled when an old CLI version is set
|
||||||
let codeql = (0, testing_utils_1.mockCodeQLVersion)("2.0.0");
|
let codeql = (0, testing_utils_1.mockCodeQLVersion)("2.0.0");
|
||||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
t.assert(!(await features.getValue(feature, codeql)));
|
||||||
// even setting the env var to true should not enable the feature if
|
// even setting the env var to true should not enable the feature if
|
||||||
// the minimum CLI version is not met
|
// the minimum CLI version is not met
|
||||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
process.env[feature_flags_1.featureConfig[feature].envVar] = "true";
|
||||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
t.assert(!(await features.getValue(feature, codeql)));
|
||||||
// feature should be enabled when a new CLI version is set
|
// feature should be enabled when a new CLI version is set
|
||||||
// and env var is not set
|
// and env var is not set
|
||||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "";
|
process.env[feature_flags_1.featureConfig[feature].envVar] = "";
|
||||||
codeql = (0, testing_utils_1.mockCodeQLVersion)(feature_flags_1.featureConfig[feature].minimumVersion);
|
codeql = (0, testing_utils_1.mockCodeQLVersion)(feature_flags_1.featureConfig[feature].minimumVersion);
|
||||||
t.assert(await featureEnablement.getValue(feature, codeql));
|
t.assert(await features.getValue(feature, codeql));
|
||||||
// set env var to false and check that the feature is now disabled
|
// set env var to false and check that the feature is now disabled
|
||||||
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
process.env[feature_flags_1.featureConfig[feature].envVar] = "false";
|
||||||
t.assert(!(await featureEnablement.getValue(feature, codeql)));
|
t.assert(!(await features.getValue(feature, codeql)));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -184,12 +186,12 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
|||||||
});
|
});
|
||||||
(0, ava_1.default)("Feature flags are saved to disk", async (t) => {
|
(0, ava_1.default)("Feature flags are saved to disk", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
||||||
t.false(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should not exist before getting feature flags");
|
t.false(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should not exist before getting feature flags");
|
||||||
t.true(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
t.true(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||||
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
||||||
const actualFeatureEnablement = JSON.parse(fs.readFileSync(cachedFeatureFlags, "utf8"));
|
const actualFeatureEnablement = JSON.parse(fs.readFileSync(cachedFeatureFlags, "utf8"));
|
||||||
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
|
||||||
@@ -197,20 +199,20 @@ for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
|||||||
actualFeatureEnablement[feature_flags_1.Feature.CliConfigFileEnabled] = false;
|
actualFeatureEnablement[feature_flags_1.Feature.CliConfigFileEnabled] = false;
|
||||||
fs.writeFileSync(cachedFeatureFlags, JSON.stringify(actualFeatureEnablement));
|
fs.writeFileSync(cachedFeatureFlags, JSON.stringify(actualFeatureEnablement));
|
||||||
// delete the in memory cache so that we are forced to use the cached file
|
// delete the in memory cache so that we are forced to use the cached file
|
||||||
featureEnablement.gitHubFeatureFlags.cachedApiResponse = undefined;
|
features.gitHubFeatureFlags.cachedApiResponse = undefined;
|
||||||
t.false(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled after reading from cached file");
|
t.false(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled after reading from cached file");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("Environment variable can override feature flag cache", async (t) => {
|
(0, ava_1.default)("Environment variable can override feature flag cache", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
const cachedFeatureFlags = path.join(tmpDir, feature_flags_1.FEATURE_FLAGS_FILE_NAME);
|
||||||
t.true(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
t.true(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be enabled initially");
|
||||||
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
t.true(fs.existsSync(cachedFeatureFlags), "Feature flag cached file should exist after getting feature flags");
|
||||||
process.env.CODEQL_PASS_CONFIG_TO_CLI = "false";
|
process.env.CODEQL_PASS_CONFIG_TO_CLI = "false";
|
||||||
t.false(await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be disabled after setting env var");
|
t.false(await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, includeCodeQlIfRequired(feature_flags_1.Feature.CliConfigFileEnabled)), "Feature flag should be disabled after setting env var");
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
||||||
@@ -228,7 +230,7 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
|||||||
}
|
}
|
||||||
(0, ava_1.default)("selects CLI v2.12.1 on Dotcom when feature flags enable v2.12.0 and v2.12.1", async (t) => {
|
(0, ava_1.default)("selects CLI v2.12.1 on Dotcom when feature flags enable v2.12.0 and v2.12.1", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||||
@@ -237,7 +239,7 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
|||||||
expectedFeatureEnablement["default_codeql_version_2_12_4_enabled"] = false;
|
expectedFeatureEnablement["default_codeql_version_2_12_4_enabled"] = false;
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_5_enabled"] = false;
|
expectedFeatureEnablement["default_codeql_version_2_12_5_enabled"] = false;
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||||
t.deepEqual(defaultCliVersion, {
|
t.deepEqual(defaultCliVersion, {
|
||||||
cliVersion: "2.12.1",
|
cliVersion: "2.12.1",
|
||||||
toolsFeatureFlagsValid: true,
|
toolsFeatureFlagsValid: true,
|
||||||
@@ -247,10 +249,10 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
|||||||
});
|
});
|
||||||
(0, ava_1.default)(`selects CLI from defaults.json on Dotcom when no default version feature flags are enabled`, async (t) => {
|
(0, ava_1.default)(`selects CLI from defaults.json on Dotcom when no default version feature flags are enabled`, async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir);
|
const features = setUpFeatureFlagTests(tmpDir);
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||||
t.deepEqual(defaultCliVersion, {
|
t.deepEqual(defaultCliVersion, {
|
||||||
cliVersion: defaults.cliVersion,
|
cliVersion: defaults.cliVersion,
|
||||||
toolsFeatureFlagsValid: false,
|
toolsFeatureFlagsValid: false,
|
||||||
@@ -261,14 +263,14 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
|
|||||||
(0, ava_1.default)("ignores invalid version numbers in default version feature flags", async (t) => {
|
(0, ava_1.default)("ignores invalid version numbers in default version feature flags", async (t) => {
|
||||||
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
await (0, util_1.withTmpDir)(async (tmpDir) => {
|
||||||
const loggedMessages = [];
|
const loggedMessages = [];
|
||||||
const featureEnablement = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
const features = setUpFeatureFlagTests(tmpDir, (0, testing_utils_1.getRecordingLogger)(loggedMessages));
|
||||||
const expectedFeatureEnablement = initializeFeatures(true);
|
const expectedFeatureEnablement = initializeFeatures(true);
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
expectedFeatureEnablement["default_codeql_version_2_12_0_enabled"] = true;
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
expectedFeatureEnablement["default_codeql_version_2_12_1_enabled"] = true;
|
||||||
expectedFeatureEnablement["default_codeql_version_2_12_invalid_enabled"] =
|
expectedFeatureEnablement["default_codeql_version_2_12_invalid_enabled"] =
|
||||||
true;
|
true;
|
||||||
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
(0, testing_utils_1.mockFeatureFlagApiEndpoint)(200, expectedFeatureEnablement);
|
||||||
const defaultCliVersion = await featureEnablement.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
const defaultCliVersion = await features.getDefaultCliVersion(util_1.GitHubVariant.DOTCOM);
|
||||||
t.deepEqual(defaultCliVersion, {
|
t.deepEqual(defaultCliVersion, {
|
||||||
cliVersion: "2.12.1",
|
cliVersion: "2.12.1",
|
||||||
toolsFeatureFlagsValid: true,
|
toolsFeatureFlagsValid: true,
|
||||||
@@ -283,7 +285,7 @@ function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
|
|||||||
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
|
||||||
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
t.assert(loggedMessages.find((v) => v.type === "debug" &&
|
||||||
v.message.includes(feature) &&
|
v.message.includes(feature) &&
|
||||||
v.message.includes("considering it disabled")) !== undefined);
|
v.message.includes("undefined in API response")) !== undefined);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
function initializeFeatures(initialValue) {
|
function initializeFeatures(initialValue) {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
14
lib/init-action-post-helper.js
generated
14
lib/init-action-post-helper.js
generated
@@ -43,12 +43,12 @@ function createFailedUploadFailedSarifResult(error) {
|
|||||||
* Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF
|
* Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF
|
||||||
* category for the workflow.
|
* category for the workflow.
|
||||||
*/
|
*/
|
||||||
async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
|
async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) {
|
||||||
if (!config.codeQLCmd) {
|
if (!config.codeQLCmd) {
|
||||||
return { upload_failed_run_skipped_because: "CodeQL command not found" };
|
return { upload_failed_run_skipped_because: "CodeQL command not found" };
|
||||||
}
|
}
|
||||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
if (!(await features.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
||||||
return { upload_failed_run_skipped_because: "Feature disabled" };
|
return { upload_failed_run_skipped_because: "Feature disabled" };
|
||||||
}
|
}
|
||||||
const workflow = await (0, workflow_1.getWorkflow)();
|
const workflow = await (0, workflow_1.getWorkflow)();
|
||||||
@@ -61,16 +61,16 @@ async function maybeUploadFailedSarif(config, repositoryNwo, featureEnablement,
|
|||||||
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
|
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
|
||||||
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
|
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
|
||||||
const sarifFile = "../codeql-failed-run.sarif";
|
const sarifFile = "../codeql-failed-run.sarif";
|
||||||
await codeql.diagnosticsExport(sarifFile, category);
|
await codeql.diagnosticsExport(sarifFile, category, config, features);
|
||||||
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||||
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
|
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
|
||||||
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
|
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
|
||||||
return uploadResult?.statusReport ?? {};
|
return uploadResult?.statusReport ?? {};
|
||||||
}
|
}
|
||||||
async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger) {
|
async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) {
|
||||||
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
|
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
|
||||||
try {
|
try {
|
||||||
return await maybeUploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
return await maybeUploadFailedSarif(config, repositoryNwo, features, logger);
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
logger.debug(`Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`);
|
logger.debug(`Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`);
|
||||||
@@ -84,13 +84,13 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablemen
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.tryUploadSarifIfRunFailed = tryUploadSarifIfRunFailed;
|
exports.tryUploadSarifIfRunFailed = tryUploadSarifIfRunFailed;
|
||||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
|
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, features, logger) {
|
||||||
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
if (config === undefined) {
|
if (config === undefined) {
|
||||||
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, featureEnablement, logger);
|
const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger);
|
||||||
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
|
if (uploadFailedSarifResult.upload_failed_run_skipped_because) {
|
||||||
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
|
logger.debug("Won't upload a failed SARIF file for this CodeQL code scanning run because: " +
|
||||||
`${uploadFailedSarifResult.upload_failed_run_skipped_because}.`);
|
`${uploadFailedSarifResult.upload_failed_run_skipped_because}.`);
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;SACH;QAAC,OAAO,CAAC,EAA
E;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,iBAAiB,EACjB,MAAM,CACP,CAAC;IACF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AApDD,kBAoDC"}
|
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAAuF;AACvF,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAWpB,SAAS,mCAAmC,CAC1C,KAAc;IAEd,OAAO;QACL,uBAAuB,EACrB,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;QACxD,6BAA6B,EAC3B,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS;KACnD,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IAAI,CAAC,CAAC,MAAM,QAAQ,CAAC,QAAQ,CAAC,uBAAO,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC,EAAE;QACxE,OAAO,EAAE,iCAAiC,EAAE,kBAAkB,EAAE,CAAC;KAClE;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,OAAO,EAAE,iCAAiC,EAAE,0BAA0B,EAAE,CAAC;KAC1E;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAEtE,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;IACF,OAAO,YAAY,EAAE,YAAY,IAAI,EAAE,CAAC;AAC1C,CAAC;AAEM,KAAK,UAAU,yBAAyB,CAC7C,MAAc,EACd,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,IAAI,OAAO,CAAC,GAAG,CAAC,oEAA+C,CAAC,KAAK,MAAM,EAAE;QAC3E,IAAI;YACF,OAAO,MAAM,sBAAsB,CACjC,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;SACH;QAAC,O
AAO,CAAC,EAAE;YACV,MAAM,CAAC,KAAK,CACV,2EAA2E,CAAC,EAAE,CAC/E,CAAC;YACF,OAAO,mCAAmC,CAAC,CAAC,CAAC,CAAC;SAC/C;KACF;SAAM;QACL,OAAO;YACL,iCAAiC,EAC/B,uCAAuC;SAC1C,CAAC;KACH;AACH,CAAC;AA1BD,8DA0BC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,QAA2B,EAC3B,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,MAAM,uBAAuB,GAAG,MAAM,yBAAyB,CAC7D,MAAM,EACN,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;IACF,IAAI,uBAAuB,CAAC,iCAAiC,EAAE;QAC7D,MAAM,CAAC,KAAK,CACV,8EAA8E;YAC5E,GAAG,uBAAuB,CAAC,iCAAiC,GAAG,CAClE,CAAC;KACH;IACD,8FAA8F;IAC9F,iCAAiC;IACjC,IACE,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM;QAClE,CAAC,uBAAuB,CAAC,qBAAqB,EAC9C;QACA,MAAM,IAAI,KAAK,CACb,4EAA4E;YAC1E,8BAA8B,uBAAuB,GAAG,CAC3D,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;IAED,OAAO,uBAAuB,CAAC;AACjC,CAAC;AApDD,kBAoDC"}
|
||||||
2
lib/init-action-post-helper.test.js
generated
2
lib/init-action-post-helper.test.js
generated
@@ -254,7 +254,7 @@ async function testFailedSarifUpload(t, actionsWorkflow, { category, expectUploa
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (expectUpload) {
|
if (expectUpload) {
|
||||||
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
|
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category, sinon.match.any, sinon.match.any), `Actual args were: ${diagnosticsExportStub.args}`);
|
||||||
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
|
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
|
||||||
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||||
isUnsuccessfulExecution: true,
|
isUnsuccessfulExecution: true,
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
26
lib/init-action.js
generated
26
lib/init-action.js
generated
@@ -46,12 +46,13 @@ async function sendInitStatusReport(actionStatus, startedAt, config, toolsDownlo
|
|||||||
tools_source: toolsSource || init_1.ToolsSource.Unknown,
|
tools_source: toolsSource || init_1.ToolsSource.Unknown,
|
||||||
workflow_languages: workflowLanguages || "",
|
workflow_languages: workflowLanguages || "",
|
||||||
};
|
};
|
||||||
let initToolsDownloadFields = {};
|
const initToolsDownloadFields = {};
|
||||||
if (toolsSource === init_1.ToolsSource.Download) {
|
if (toolsDownloadDurationMs !== undefined) {
|
||||||
initToolsDownloadFields = {
|
initToolsDownloadFields.tools_download_duration_ms =
|
||||||
tools_download_duration_ms: toolsDownloadDurationMs,
|
toolsDownloadDurationMs;
|
||||||
tools_feature_flags_valid: toolsFeatureFlagsValid,
|
}
|
||||||
};
|
if (toolsFeatureFlagsValid !== undefined) {
|
||||||
|
initToolsDownloadFields.tools_feature_flags_valid = toolsFeatureFlagsValid;
|
||||||
}
|
}
|
||||||
if (config !== undefined) {
|
if (config !== undefined) {
|
||||||
const languages = config.languages.join(",");
|
const languages = config.languages.join(",");
|
||||||
@@ -112,6 +113,7 @@ async function run() {
|
|||||||
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
||||||
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
|
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
|
||||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||||
|
const registriesInput = (0, actions_util_1.getOptionalInput)("registries");
|
||||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||||
try {
|
try {
|
||||||
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
||||||
@@ -122,13 +124,13 @@ async function run() {
|
|||||||
if (codeQLDefaultVersionInfo.variant === util_1.GitHubVariant.DOTCOM) {
|
if (codeQLDefaultVersionInfo.variant === util_1.GitHubVariant.DOTCOM) {
|
||||||
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
|
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
|
||||||
}
|
}
|
||||||
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, await (0, util_1.shouldBypassToolcache)(features, (0, actions_util_1.getOptionalInput)("tools"), (0, actions_util_1.getOptionalInput)("languages"), repositoryNwo, logger), codeQLDefaultVersionInfo, logger);
|
const initCodeQLResult = await (0, init_1.initCodeQL)((0, actions_util_1.getOptionalInput)("tools"), apiDetails, (0, actions_util_1.getTemporaryDirectory)(), gitHubVersion.type, codeQLDefaultVersionInfo, logger);
|
||||||
codeql = initCodeQLResult.codeql;
|
codeql = initCodeQLResult.codeql;
|
||||||
toolsDownloadDurationMs = initCodeQLResult.toolsDownloadDurationMs;
|
toolsDownloadDurationMs = initCodeQLResult.toolsDownloadDurationMs;
|
||||||
toolsVersion = initCodeQLResult.toolsVersion;
|
toolsVersion = initCodeQLResult.toolsVersion;
|
||||||
toolsSource = initCodeQLResult.toolsSource;
|
toolsSource = initCodeQLResult.toolsSource;
|
||||||
await (0, util_1.enrichEnvironment)(codeql);
|
await (0, util_1.enrichEnvironment)(codeql);
|
||||||
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), (0, actions_util_1.getOptionalInput)("registries"), (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), await getTrapCachingEnabled(features),
|
config = await (0, init_1.initConfig)((0, actions_util_1.getOptionalInput)("languages"), (0, actions_util_1.getOptionalInput)("queries"), (0, actions_util_1.getOptionalInput)("packs"), registriesInput, (0, actions_util_1.getOptionalInput)("config-file"), (0, actions_util_1.getOptionalInput)("db-location"), getTrapCachingEnabled(),
|
||||||
// Debug mode is enabled if:
|
// Debug mode is enabled if:
|
||||||
// - The `init` Action is passed `debug: true`.
|
// - The `init` Action is passed `debug: true`.
|
||||||
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
// - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
|
||||||
@@ -172,7 +174,7 @@ async function run() {
|
|||||||
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
|
core.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true");
|
||||||
}
|
}
|
||||||
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
const sourceRoot = path.resolve((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), (0, actions_util_1.getOptionalInput)("source-root") || "");
|
||||||
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", features, logger);
|
const tracerConfig = await (0, init_1.runInit)(codeql, config, sourceRoot, "Runner.Worker.exe", registriesInput, features, apiDetails, logger);
|
||||||
if (tracerConfig !== undefined) {
|
if (tracerConfig !== undefined) {
|
||||||
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
for (const [key, value] of Object.entries(tracerConfig.env)) {
|
||||||
core.exportVariable(key, value);
|
core.exportVariable(key, value);
|
||||||
@@ -192,7 +194,7 @@ async function run() {
|
|||||||
}
|
}
|
||||||
await sendInitStatusReport("success", startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger);
|
await sendInitStatusReport("success", startedAt, config, toolsDownloadDurationMs, toolsFeatureFlagsValid, toolsSource, toolsVersion, logger);
|
||||||
}
|
}
|
||||||
async function getTrapCachingEnabled(featureEnablement) {
|
function getTrapCachingEnabled() {
|
||||||
// If the workflow specified something always respect that
|
// If the workflow specified something always respect that
|
||||||
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
|
const trapCaching = (0, actions_util_1.getOptionalInput)("trap-caching");
|
||||||
if (trapCaching !== undefined)
|
if (trapCaching !== undefined)
|
||||||
@@ -200,8 +202,8 @@ async function getTrapCachingEnabled(featureEnablement) {
|
|||||||
// On self-hosted runners which may have slow network access, disable TRAP caching by default
|
// On self-hosted runners which may have slow network access, disable TRAP caching by default
|
||||||
if (!(0, util_1.isHostedRunner)())
|
if (!(0, util_1.isHostedRunner)())
|
||||||
return false;
|
return false;
|
||||||
// On hosted runners, respect the feature flag
|
// On hosted runners, enable TRAP caching by default
|
||||||
return await featureEnablement.getValue(feature_flags_1.Feature.TrapCachingEnabled);
|
return true;
|
||||||
}
|
}
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
try {
|
try {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
26
lib/init.js
generated
26
lib/init.js
generated
@@ -41,28 +41,42 @@ var ToolsSource;
|
|||||||
ToolsSource["Toolcache"] = "TOOLCACHE";
|
ToolsSource["Toolcache"] = "TOOLCACHE";
|
||||||
ToolsSource["Download"] = "DOWNLOAD";
|
ToolsSource["Download"] = "DOWNLOAD";
|
||||||
})(ToolsSource = exports.ToolsSource || (exports.ToolsSource = {}));
|
})(ToolsSource = exports.ToolsSource || (exports.ToolsSource = {}));
|
||||||
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
|
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||||
logger.startGroup("Setup CodeQL tools");
|
logger.startGroup("Setup CodeQL tools");
|
||||||
const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger, true);
|
const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, true);
|
||||||
await codeql.printVersion();
|
await codeql.printVersion();
|
||||||
logger.endGroup();
|
logger.endGroup();
|
||||||
return { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion };
|
return { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion };
|
||||||
}
|
}
|
||||||
exports.initCodeQL = initCodeQL;
|
exports.initCodeQL = initCodeQL;
|
||||||
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger) {
|
async function initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger) {
|
||||||
logger.startGroup("Load language configuration");
|
logger.startGroup("Load language configuration");
|
||||||
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, featureEnablement, logger);
|
const config = await configUtils.initConfig(languagesInput, queriesInput, packsInput, registriesInput, configFile, dbLocation, trapCachingEnabled, debugMode, debugArtifactName, debugDatabaseName, repository, tempDir, codeQL, workspacePath, gitHubVersion, apiDetails, features, logger);
|
||||||
analysisPaths.printPathFiltersWarning(config, logger);
|
analysisPaths.printPathFiltersWarning(config, logger);
|
||||||
logger.endGroup();
|
logger.endGroup();
|
||||||
return config;
|
return config;
|
||||||
}
|
}
|
||||||
exports.initConfig = initConfig;
|
exports.initConfig = initConfig;
|
||||||
async function runInit(codeql, config, sourceRoot, processName, featureEnablement, logger) {
|
async function runInit(codeql, config, sourceRoot, processName, registriesInput, features, apiDetails, logger) {
|
||||||
fs.mkdirSync(config.dbLocation, { recursive: true });
|
fs.mkdirSync(config.dbLocation, { recursive: true });
|
||||||
try {
|
try {
|
||||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||||
|
// When parsing the codeql config in the CLI, we have not yet created the qlconfig file.
|
||||||
|
// So, create it now.
|
||||||
|
// If we are parsing the config file in the Action, then the qlconfig file was already created
|
||||||
|
// before the `pack download` command was invoked. It is not required for the init command.
|
||||||
|
let registriesAuthTokens;
|
||||||
|
let qlconfigFile;
|
||||||
|
if (await util.useCodeScanningConfigInCli(codeql, features)) {
|
||||||
|
({ registriesAuthTokens, qlconfigFile } =
|
||||||
|
await configUtils.generateRegistries(registriesInput, codeql, config.tempDir, logger));
|
||||||
|
}
|
||||||
|
await configUtils.wrapEnvironment({
|
||||||
|
GITHUB_TOKEN: apiDetails.auth,
|
||||||
|
CODEQL_REGISTRIES_AUTH: registriesAuthTokens,
|
||||||
|
},
|
||||||
// Init a database cluster
|
// Init a database cluster
|
||||||
await codeql.databaseInitCluster(config, sourceRoot, processName, featureEnablement, logger);
|
async () => await codeql.databaseInitCluster(config, sourceRoot, processName, features, qlconfigFile, logger));
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
for (const language of config.languages) {
|
for (const language of config.languages) {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
3
lib/languages.js
generated
3
lib/languages.js
generated
@@ -1,6 +1,6 @@
|
|||||||
"use strict";
|
"use strict";
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.resolveAlias = exports.KOTLIN_SWIFT_BYPASS = exports.LANGUAGE_ALIASES = exports.Language = void 0;
|
exports.isScannedLanguage = exports.isTracedLanguage = exports.parseLanguage = exports.resolveAlias = exports.LANGUAGE_ALIASES = exports.Language = void 0;
|
||||||
// All the languages supported by CodeQL
|
// All the languages supported by CodeQL
|
||||||
var Language;
|
var Language;
|
||||||
(function (Language) {
|
(function (Language) {
|
||||||
@@ -21,7 +21,6 @@ exports.LANGUAGE_ALIASES = {
|
|||||||
kotlin: Language.java,
|
kotlin: Language.java,
|
||||||
typescript: Language.javascript,
|
typescript: Language.javascript,
|
||||||
};
|
};
|
||||||
exports.KOTLIN_SWIFT_BYPASS = ["kotlin", "swift"];
|
|
||||||
function resolveAlias(lang) {
|
function resolveAlias(lang) {
|
||||||
return exports.LANGUAGE_ALIASES[lang] || lang;
|
return exports.LANGUAGE_ALIASES[lang] || lang;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIW,QAAA,mBAAmB,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AAEvD,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIF,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
|
||||||
292
lib/setup-codeql.js
generated
292
lib/setup-codeql.js
generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.getCodeQLSource = exports.convertToSemVer = exports.getBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.findCodeQLBundleTagDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
|
exports.setupCodeQLBundle = exports.getCodeQLURLVersion = exports.downloadCodeQL = exports.tryGetFallbackToolcacheVersion = exports.getCodeQLSource = exports.convertToSemVer = exports.tryGetBundleVersionFromUrl = exports.tryFindCliVersionDotcomOnly = exports.findCodeQLBundleTagDotcomOnly = exports.getCodeQLActionRepository = exports.CODEQL_DEFAULT_ACTION_REPOSITORY = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const perf_hooks_1 = require("perf_hooks");
|
const perf_hooks_1 = require("perf_hooks");
|
||||||
@@ -211,14 +211,30 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger)
|
|||||||
}
|
}
|
||||||
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`;
|
return `https://github.com/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/releases/download/${tagName}/${codeQLBundleName}`;
|
||||||
}
|
}
|
||||||
function getBundleVersionFromUrl(url) {
|
function tryGetBundleVersionFromTagName(tagName, logger) {
|
||||||
const match = url.match(/\/codeql-bundle-(.*)\//);
|
const match = tagName.match(/^codeql-bundle-(.*)$/);
|
||||||
if (match === null || match.length < 2) {
|
if (match === null || match.length < 2) {
|
||||||
throw new Error(`Malformed tools url: ${url}. Bundle version could not be inferred`);
|
logger.debug(`Could not determine bundle version from tag ${tagName}.`);
|
||||||
|
return undefined;
|
||||||
}
|
}
|
||||||
return match[1];
|
return match[1];
|
||||||
}
|
}
|
||||||
exports.getBundleVersionFromUrl = getBundleVersionFromUrl;
|
function tryGetTagNameFromUrl(url, logger) {
|
||||||
|
const match = url.match(/\/(codeql-bundle-.*)\//);
|
||||||
|
if (match === null || match.length < 2) {
|
||||||
|
logger.debug(`Could not determine tag name for URL ${url}.`);
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return match[1];
|
||||||
|
}
|
||||||
|
function tryGetBundleVersionFromUrl(url, logger) {
|
||||||
|
const tagName = tryGetTagNameFromUrl(url, logger);
|
||||||
|
if (tagName === undefined) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return tryGetBundleVersionFromTagName(tagName, logger);
|
||||||
|
}
|
||||||
|
exports.tryGetBundleVersionFromUrl = tryGetBundleVersionFromUrl;
|
||||||
function convertToSemVer(version, logger) {
|
function convertToSemVer(version, logger) {
|
||||||
if (!semver.valid(version)) {
|
if (!semver.valid(version)) {
|
||||||
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
|
logger.debug(`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`);
|
||||||
@@ -231,18 +247,10 @@ function convertToSemVer(version, logger) {
|
|||||||
return s;
|
return s;
|
||||||
}
|
}
|
||||||
exports.convertToSemVer = convertToSemVer;
|
exports.convertToSemVer = convertToSemVer;
|
||||||
async function getOrFindBundleTagName(version, logger) {
|
|
||||||
if (version.variant === util.GitHubVariant.DOTCOM) {
|
|
||||||
return await findCodeQLBundleTagDotcomOnly(version.cliVersion, logger);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return version.tagName;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
/**
|
||||||
* Look for a version of the CodeQL tools in the cache which could override the requested CLI version.
|
* Look for a version of the CodeQL tools in the cache which could override the requested CLI version.
|
||||||
*/
|
*/
|
||||||
async function findOverridingToolsInCache(requestedCliVersion, logger) {
|
async function findOverridingToolsInCache(humanReadableVersion, logger) {
|
||||||
const candidates = toolcache
|
const candidates = toolcache
|
||||||
.findAllVersions("CodeQL")
|
.findAllVersions("CodeQL")
|
||||||
.filter(util_1.isGoodVersion)
|
.filter(util_1.isGoodVersion)
|
||||||
@@ -253,7 +261,7 @@ async function findOverridingToolsInCache(requestedCliVersion, logger) {
|
|||||||
.filter(({ folder }) => fs.existsSync(path.join(folder, "pinned-version")));
|
.filter(({ folder }) => fs.existsSync(path.join(folder, "pinned-version")));
|
||||||
if (candidates.length === 1) {
|
if (candidates.length === 1) {
|
||||||
const candidate = candidates[0];
|
const candidate = candidates[0];
|
||||||
logger.debug(`CodeQL tools version ${candidate.version} in toolcache overriding version ${requestedCliVersion}.`);
|
logger.debug(`CodeQL tools version ${candidate.version} in toolcache overriding version ${humanReadableVersion}.`);
|
||||||
return {
|
return {
|
||||||
codeqlFolder: candidate.folder,
|
codeqlFolder: candidate.folder,
|
||||||
sourceType: "toolcache",
|
sourceType: "toolcache",
|
||||||
@@ -269,7 +277,7 @@ async function findOverridingToolsInCache(requestedCliVersion, logger) {
|
|||||||
}
|
}
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
async function getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger) {
|
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger) {
|
||||||
if (toolsInput && toolsInput !== "latest" && !toolsInput.startsWith("http")) {
|
if (toolsInput && toolsInput !== "latest" && !toolsInput.startsWith("http")) {
|
||||||
return {
|
return {
|
||||||
codeqlTarPath: toolsInput,
|
codeqlTarPath: toolsInput,
|
||||||
@@ -277,124 +285,166 @@ async function getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, a
|
|||||||
toolsVersion: "local",
|
toolsVersion: "local",
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
const forceLatestReason =
|
|
||||||
// We use the special value of 'latest' to prioritize the version in the
|
|
||||||
// defaults over any pinned cached version.
|
|
||||||
toolsInput === "latest"
|
|
||||||
? '"tools: latest" was requested'
|
|
||||||
: // If the user hasn't requested a particular CodeQL version, then bypass
|
|
||||||
// the toolcache when the appropriate feature is enabled. This
|
|
||||||
// allows us to quickly rollback a broken bundle that has made its way
|
|
||||||
// into the toolcache.
|
|
||||||
toolsInput === undefined && bypassToolcache
|
|
||||||
? "a specific version of the CodeQL tools was not requested and the bypass toolcache feature is enabled"
|
|
||||||
: undefined;
|
|
||||||
const forceLatest = forceLatestReason !== undefined;
|
|
||||||
if (forceLatest) {
|
|
||||||
logger.debug(`Forcing the latest version of the CodeQL tools since ${forceLatestReason}.`);
|
|
||||||
}
|
|
||||||
/**
|
/**
|
||||||
* The requested version is:
|
* Whether the tools shipped with the Action, i.e. those in `defaults.json`, have been forced.
|
||||||
*
|
*
|
||||||
* 1. The one in `defaults.json`, if forceLatest is true.
|
* We use the special value of 'latest' to prioritize the version in `defaults.json` over the
|
||||||
* 2. The version specified by the tools input URL, if one was provided.
|
* version specified by the feature flags on Dotcom and over any pinned cached version on
|
||||||
* 3. The default CLI version, otherwise.
|
* Enterprise Server.
|
||||||
|
|
||||||
* We include a `variant` property to let us verify using the type system that
|
|
||||||
* `tagName` is only undefined when the variant is Dotcom. This lets us ensure
|
|
||||||
* that we can always compute `tagName`, either by using the existing tag name
|
|
||||||
* on enterprise instances, or calling `findCodeQLBundleTagDotcomOnly` on
|
|
||||||
* Dotcom.
|
|
||||||
*/
|
*/
|
||||||
const requestedVersion = forceLatest
|
const forceShippedTools = toolsInput === "latest";
|
||||||
? // case 1
|
if (forceShippedTools) {
|
||||||
{
|
logger.info("Overriding the version of the CodeQL tools by the version shipped with the Action since " +
|
||||||
cliVersion: defaults.cliVersion,
|
`"tools: latest" was requested.`);
|
||||||
syntheticCliVersion: defaults.cliVersion,
|
}
|
||||||
tagName: defaults.bundleVersion,
|
/** CLI version number, for example 2.12.1. */
|
||||||
variant,
|
let cliVersion;
|
||||||
}
|
/** Tag name of the CodeQL bundle, for example `codeql-bundle-20230120`. */
|
||||||
: toolsInput !== undefined
|
let tagName;
|
||||||
? // case 2
|
/**
|
||||||
{
|
* URL of the CodeQL bundle.
|
||||||
syntheticCliVersion: convertToSemVer(getBundleVersionFromUrl(toolsInput), logger),
|
*
|
||||||
tagName: `codeql-bundle-${getBundleVersionFromUrl(toolsInput)}`,
|
* This does not always include a tag name.
|
||||||
url: toolsInput,
|
*/
|
||||||
variant,
|
let url;
|
||||||
}
|
if (forceShippedTools) {
|
||||||
: // case 3
|
cliVersion = defaults.cliVersion;
|
||||||
{
|
tagName = defaults.bundleVersion;
|
||||||
...defaultCliVersion,
|
}
|
||||||
syntheticCliVersion: defaultCliVersion.cliVersion,
|
else if (toolsInput !== undefined) {
|
||||||
};
|
// If a tools URL was provided, then use that.
|
||||||
// If we find the specified version, we always use that.
|
tagName = tryGetTagNameFromUrl(toolsInput, logger);
|
||||||
let codeqlFolder = toolcache.find("CodeQL", requestedVersion.syntheticCliVersion);
|
url = toolsInput;
|
||||||
let tagName = requestedVersion["tagName"];
|
}
|
||||||
if (!codeqlFolder) {
|
else {
|
||||||
logger.debug("Didn't find a version of the CodeQL tools in the toolcache with a version number " +
|
// Otherwise, use the default CLI version passed in.
|
||||||
`exactly matching ${requestedVersion.syntheticCliVersion}.`);
|
cliVersion = defaultCliVersion.cliVersion;
|
||||||
if (requestedVersion.cliVersion) {
|
tagName = defaultCliVersion["tagName"];
|
||||||
|
}
|
||||||
|
const bundleVersion = tagName && tryGetBundleVersionFromTagName(tagName, logger);
|
||||||
|
const humanReadableVersion = cliVersion ??
|
||||||
|
(bundleVersion && convertToSemVer(bundleVersion, logger)) ??
|
||||||
|
tagName ??
|
||||||
|
url ??
|
||||||
|
"unknown";
|
||||||
|
logger.debug("Attempting to obtain CodeQL tools. " +
|
||||||
|
`CLI version: ${cliVersion ?? "unknown"}, ` +
|
||||||
|
`bundle tag name: ${tagName ?? "unknown"}, ` +
|
||||||
|
`URL: ${url ?? "unspecified"}.`);
|
||||||
|
let codeqlFolder;
|
||||||
|
if (cliVersion) {
|
||||||
|
// If we find the specified CLI version, we always use that.
|
||||||
|
codeqlFolder = toolcache.find("CodeQL", cliVersion);
|
||||||
|
// Fall back to matching `x.y.z-<tagName>`.
|
||||||
|
if (!codeqlFolder) {
|
||||||
|
logger.debug("Didn't find a version of the CodeQL tools in the toolcache with a version number " +
|
||||||
|
`exactly matching ${cliVersion}.`);
|
||||||
const allVersions = toolcache.findAllVersions("CodeQL");
|
const allVersions = toolcache.findAllVersions("CodeQL");
|
||||||
logger.debug(`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(allVersions)}.`);
|
logger.debug(`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(allVersions)}.`);
|
||||||
// If there is exactly one version of the CodeQL tools in the toolcache, and that version is
|
// If there is exactly one version of the CodeQL tools in the toolcache, and that version is
|
||||||
// the form `x.y.z-<tagName>`, then use it.
|
// the form `x.y.z-<tagName>`, then use it.
|
||||||
const candidateVersions = allVersions.filter((version) => version.startsWith(`${requestedVersion.cliVersion}-`));
|
const candidateVersions = allVersions.filter((version) => version.startsWith(`${cliVersion}-`));
|
||||||
if (candidateVersions.length === 1) {
|
if (candidateVersions.length === 1) {
|
||||||
logger.debug("Exactly one candidate version found, using that.");
|
logger.debug(`Exactly one version of the CodeQL tools starting with ${cliVersion} found in the ` +
|
||||||
|
"toolcache, using that.");
|
||||||
codeqlFolder = toolcache.find("CodeQL", candidateVersions[0]);
|
codeqlFolder = toolcache.find("CodeQL", candidateVersions[0]);
|
||||||
}
|
}
|
||||||
|
else if (candidateVersions.length === 0) {
|
||||||
|
logger.debug(`Didn't find any versions of the CodeQL tools starting with ${cliVersion} ` +
|
||||||
|
`in the toolcache. Trying next fallback method.`);
|
||||||
|
}
|
||||||
else {
|
else {
|
||||||
logger.debug("Did not find exactly one version of the CodeQL tools starting with the requested version.");
|
logger.warning(`Found ${candidateVersions.length} versions of the CodeQL tools starting with ` +
|
||||||
|
`${cliVersion} in the toolcache, but at most one was expected.`);
|
||||||
|
logger.debug("Trying next fallback method.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!codeqlFolder && requestedVersion.cliVersion) {
|
// Fall back to matching `0.0.0-<bundleVersion>`.
|
||||||
// Fall back to accepting a `0.0.0-<tagName>` version if we didn't find the
|
if (!codeqlFolder && (cliVersion || tagName)) {
|
||||||
// `x.y.z` version. This is to support old versions of the toolcache.
|
if (cliVersion || tagName) {
|
||||||
//
|
const fallbackVersion = await tryGetFallbackToolcacheVersion(cliVersion, tagName, variant, logger);
|
||||||
// If we are on Dotcom, we will make an HTTP request to the Releases API here
|
if (fallbackVersion) {
|
||||||
// to find the tag name for the requested version.
|
codeqlFolder = toolcache.find("CodeQL", fallbackVersion);
|
||||||
tagName =
|
}
|
||||||
tagName || (await getOrFindBundleTagName(requestedVersion, logger));
|
else {
|
||||||
const fallbackVersion = convertToSemVer(tagName, logger);
|
logger.debug("Could not determine a fallback toolcache version number for CodeQL tools version " +
|
||||||
logger.debug(`Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL tools version ` +
|
`${humanReadableVersion}.`);
|
||||||
`${requestedVersion.cliVersion}.`);
|
}
|
||||||
codeqlFolder = toolcache.find("CodeQL", fallbackVersion);
|
}
|
||||||
|
else {
|
||||||
|
logger.debug("Both the CLI version and the bundle version are unknown, so we will not be able to find " +
|
||||||
|
"the requested version of the CodeQL tools in the toolcache.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (codeqlFolder) {
|
||||||
|
logger.info(`Found CodeQL tools version ${humanReadableVersion} in the toolcache.`);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
logger.info(`Did not find CodeQL tools version ${humanReadableVersion} in the toolcache.`);
|
||||||
}
|
}
|
||||||
if (codeqlFolder) {
|
if (codeqlFolder) {
|
||||||
return {
|
return {
|
||||||
codeqlFolder,
|
codeqlFolder,
|
||||||
sourceType: "toolcache",
|
sourceType: "toolcache",
|
||||||
toolsVersion: requestedVersion.syntheticCliVersion,
|
toolsVersion: cliVersion ?? humanReadableVersion,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
logger.debug(`Did not find CodeQL tools version ${requestedVersion.syntheticCliVersion} in the toolcache.`);
|
|
||||||
// If we don't find the requested version on Enterprise, we may allow a
|
// If we don't find the requested version on Enterprise, we may allow a
|
||||||
// different version to save download time if the version hasn't been
|
// different version to save download time if the version hasn't been
|
||||||
// specified explicitly (in which case we always honor it).
|
// specified explicitly (in which case we always honor it).
|
||||||
if (variant !== util.GitHubVariant.DOTCOM && !forceLatest && !toolsInput) {
|
if (variant !== util.GitHubVariant.DOTCOM &&
|
||||||
const result = await findOverridingToolsInCache(requestedVersion.syntheticCliVersion, logger);
|
!forceShippedTools &&
|
||||||
|
!toolsInput) {
|
||||||
|
const result = await findOverridingToolsInCache(humanReadableVersion, logger);
|
||||||
if (result !== undefined) {
|
if (result !== undefined) {
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (!url) {
|
||||||
|
if (!tagName && cliVersion && variant === util.GitHubVariant.DOTCOM) {
|
||||||
|
tagName = await findCodeQLBundleTagDotcomOnly(cliVersion, logger);
|
||||||
|
}
|
||||||
|
else if (!tagName) {
|
||||||
|
throw new Error(`Could not obtain the requested version (${humanReadableVersion}) of the CodeQL tools ` +
|
||||||
|
"since we could not compute the tag name.");
|
||||||
|
}
|
||||||
|
url = await getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger);
|
||||||
|
}
|
||||||
return {
|
return {
|
||||||
cliVersion: requestedVersion.cliVersion || undefined,
|
bundleVersion: tagName && tryGetBundleVersionFromTagName(tagName, logger),
|
||||||
codeqlURL: requestedVersion["url"] ||
|
cliVersion,
|
||||||
(await getCodeQLBundleDownloadURL(tagName ||
|
codeqlURL: url,
|
||||||
// The check on `requestedVersion.tagName` is redundant but lets us
|
|
||||||
// use the property that if we don't know `requestedVersion.tagName`,
|
|
||||||
// then we must know `requestedVersion.cliVersion`. This property is
|
|
||||||
// required by the type of `getOrFindBundleTagName`.
|
|
||||||
(requestedVersion.tagName !== undefined
|
|
||||||
? requestedVersion.tagName
|
|
||||||
: await getOrFindBundleTagName(requestedVersion, logger)), apiDetails, variant, logger)),
|
|
||||||
sourceType: "download",
|
sourceType: "download",
|
||||||
toolsVersion: requestedVersion.syntheticCliVersion,
|
toolsVersion: cliVersion ?? humanReadableVersion,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.getCodeQLSource = getCodeQLSource;
|
exports.getCodeQLSource = getCodeQLSource;
|
||||||
async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, tempDir, logger) {
|
/**
|
||||||
|
* Gets a fallback version number to use when looking for CodeQL in the toolcache if we didn't find
|
||||||
|
* the `x.y.z` version. This is to support old versions of the toolcache.
|
||||||
|
*/
|
||||||
|
async function tryGetFallbackToolcacheVersion(cliVersion, tagName, variant, logger) {
|
||||||
|
//
|
||||||
|
// If we are on Dotcom, we will make an HTTP request to the Releases API here
|
||||||
|
// to find the tag name for the requested version.
|
||||||
|
if (cliVersion && !tagName && variant === util.GitHubVariant.DOTCOM) {
|
||||||
|
tagName = await findCodeQLBundleTagDotcomOnly(cliVersion, logger);
|
||||||
|
}
|
||||||
|
if (!tagName) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const bundleVersion = tryGetBundleVersionFromTagName(tagName, logger);
|
||||||
|
if (!bundleVersion) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const fallbackVersion = convertToSemVer(bundleVersion, logger);
|
||||||
|
logger.debug(`Computed a fallback toolcache version number of ${fallbackVersion} for CodeQL version ` +
|
||||||
|
`${cliVersion ?? tagName}.`);
|
||||||
|
return fallbackVersion;
|
||||||
|
}
|
||||||
|
exports.tryGetFallbackToolcacheVersion = tryGetFallbackToolcacheVersion;
|
||||||
|
async function downloadCodeQL(codeqlURL, maybeBundleVersion, maybeCliVersion, apiDetails, variant, tempDir, logger) {
|
||||||
const parsedCodeQLURL = new URL(codeqlURL);
|
const parsedCodeQLURL = new URL(codeqlURL);
|
||||||
const searchParams = new URLSearchParams(parsedCodeQLURL.search);
|
const searchParams = new URLSearchParams(parsedCodeQLURL.search);
|
||||||
const headers = {
|
const headers = {
|
||||||
@@ -404,12 +454,13 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
|
|||||||
// from the same GitHub instance the Action is running on.
|
// from the same GitHub instance the Action is running on.
|
||||||
// This avoids leaking Enterprise tokens to dotcom.
|
// This avoids leaking Enterprise tokens to dotcom.
|
||||||
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
// We also don't want to send an authorization header if there's already a token provided in the URL.
|
||||||
|
let authorization = undefined;
|
||||||
if (searchParams.has("token")) {
|
if (searchParams.has("token")) {
|
||||||
logger.debug("CodeQL tools URL contains an authorization token.");
|
logger.debug("CodeQL tools URL contains an authorization token.");
|
||||||
}
|
}
|
||||||
else if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
|
else if (codeqlURL.startsWith(`${apiDetails.url}/`)) {
|
||||||
logger.debug("Providing an authorization token to download CodeQL tools.");
|
logger.debug("Providing an authorization token to download CodeQL tools.");
|
||||||
headers.authorization = `token ${apiDetails.auth}`;
|
authorization = `token ${apiDetails.auth}`;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
logger.debug("Downloading CodeQL tools without an authorization token.");
|
logger.debug("Downloading CodeQL tools without an authorization token.");
|
||||||
@@ -418,16 +469,26 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
|
|||||||
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
const dest = path.join(tempDir, (0, uuid_1.v4)());
|
||||||
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
const finalHeaders = Object.assign({ "User-Agent": "CodeQL Action" }, headers);
|
||||||
const toolsDownloadStart = perf_hooks_1.performance.now();
|
const toolsDownloadStart = perf_hooks_1.performance.now();
|
||||||
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, undefined, finalHeaders);
|
const codeqlPath = await toolcache.downloadTool(codeqlURL, dest, authorization, finalHeaders);
|
||||||
const toolsDownloadDurationMs = perf_hooks_1.performance.now() - toolsDownloadStart;
|
const toolsDownloadDurationMs = Math.round(perf_hooks_1.performance.now() - toolsDownloadStart);
|
||||||
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
logger.debug(`CodeQL bundle download to ${codeqlPath} complete.`);
|
||||||
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
const codeqlExtracted = await toolcache.extractTar(codeqlPath);
|
||||||
const bundleVersion = getBundleVersionFromUrl(codeqlURL);
|
const bundleVersion = maybeBundleVersion ?? tryGetBundleVersionFromUrl(codeqlURL, logger);
|
||||||
|
if (bundleVersion === undefined) {
|
||||||
|
logger.debug("Could not cache CodeQL tools because we could not determine the bundle version from the " +
|
||||||
|
`URL ${codeqlURL}.`);
|
||||||
|
return {
|
||||||
|
toolsVersion: maybeCliVersion ?? "unknown",
|
||||||
|
codeqlFolder: codeqlExtracted,
|
||||||
|
toolsDownloadDurationMs,
|
||||||
|
};
|
||||||
|
}
|
||||||
// Try to compute the CLI version for this bundle
|
// Try to compute the CLI version for this bundle
|
||||||
const cliVersion = maybeCliVersion ||
|
if (maybeCliVersion === undefined &&
|
||||||
(variant === util.GitHubVariant.DOTCOM &&
|
variant === util.GitHubVariant.DOTCOM &&
|
||||||
(await tryFindCliVersionDotcomOnly(`codeql-bundle-${bundleVersion}`, logger))) ||
|
codeqlURL.includes(`/${exports.CODEQL_DEFAULT_ACTION_REPOSITORY}/`)) {
|
||||||
undefined;
|
maybeCliVersion = await tryFindCliVersionDotcomOnly(`codeql-bundle-${bundleVersion}`, logger);
|
||||||
|
}
|
||||||
// Include both the CLI version and the bundle version in the toolcache version number. That way
|
// Include both the CLI version and the bundle version in the toolcache version number. That way
|
||||||
// if the user requests the same URL again, we can get it from the cache without having to call
|
// if the user requests the same URL again, we can get it from the cache without having to call
|
||||||
// any of the Releases API.
|
// any of the Releases API.
|
||||||
@@ -437,11 +498,11 @@ async function downloadCodeQL(codeqlURL, maybeCliVersion, apiDetails, variant, t
|
|||||||
// CLI release. In principle, it should be enough to just check that the CLI version isn't a
|
// CLI release. In principle, it should be enough to just check that the CLI version isn't a
|
||||||
// pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
|
// pre-release, but the version numbers of CodeQL nightlies have the format `x.y.z+<timestamp>`,
|
||||||
// and we don't want these nightlies to override stable CLI versions in the toolcache.
|
// and we don't want these nightlies to override stable CLI versions in the toolcache.
|
||||||
const toolcacheVersion = cliVersion && cliVersion.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
|
const toolcacheVersion = maybeCliVersion?.match(/^[0-9]+\.[0-9]+\.[0-9]+$/)
|
||||||
? `${cliVersion}-${bundleVersion}`
|
? `${maybeCliVersion}-${bundleVersion}`
|
||||||
: convertToSemVer(bundleVersion, logger);
|
: convertToSemVer(bundleVersion, logger);
|
||||||
return {
|
return {
|
||||||
toolsVersion: cliVersion || toolcacheVersion,
|
toolsVersion: maybeCliVersion ?? toolcacheVersion,
|
||||||
codeqlFolder: await toolcache.cacheDir(codeqlExtracted, "CodeQL", toolcacheVersion),
|
codeqlFolder: await toolcache.cacheDir(codeqlExtracted, "CodeQL", toolcacheVersion),
|
||||||
toolsDownloadDurationMs,
|
toolsDownloadDurationMs,
|
||||||
};
|
};
|
||||||
@@ -462,15 +523,14 @@ exports.getCodeQLURLVersion = getCodeQLURLVersion;
|
|||||||
* @param apiDetails
|
* @param apiDetails
|
||||||
* @param tempDir
|
* @param tempDir
|
||||||
* @param variant
|
* @param variant
|
||||||
* @param bypassToolcache
|
|
||||||
* @param defaultCliVersion
|
* @param defaultCliVersion
|
||||||
* @param logger
|
* @param logger
|
||||||
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
|
||||||
* version requirement. Must be set to true outside tests.
|
* version requirement. Must be set to true outside tests.
|
||||||
* @returns the path to the extracted bundle, and the version of the tools
|
* @returns the path to the extracted bundle, and the version of the tools
|
||||||
*/
|
*/
|
||||||
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypassToolcache, defaultCliVersion, logger) {
|
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
|
||||||
const source = await getCodeQLSource(toolsInput, bypassToolcache, defaultCliVersion, apiDetails, variant, logger);
|
const source = await getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, logger);
|
||||||
let codeqlFolder;
|
let codeqlFolder;
|
||||||
let toolsVersion = source.toolsVersion;
|
let toolsVersion = source.toolsVersion;
|
||||||
let toolsDownloadDurationMs;
|
let toolsDownloadDurationMs;
|
||||||
@@ -486,7 +546,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, bypas
|
|||||||
toolsSource = init_1.ToolsSource.Toolcache;
|
toolsSource = init_1.ToolsSource.Toolcache;
|
||||||
break;
|
break;
|
||||||
case "download": {
|
case "download": {
|
||||||
const result = await downloadCodeQL(source.codeqlURL, source.cliVersion, apiDetails, variant, tempDir, logger);
|
const result = await downloadCodeQL(source.codeqlURL, source.bundleVersion, source.cliVersion, apiDetails, variant, tempDir, logger);
|
||||||
toolsVersion = result.toolsVersion;
|
toolsVersion = result.toolsVersion;
|
||||||
codeqlFolder = result.codeqlFolder;
|
codeqlFolder = result.codeqlFolder;
|
||||||
toolsDownloadDurationMs = result.toolsDownloadDurationMs;
|
toolsDownloadDurationMs = result.toolsDownloadDurationMs;
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
7
lib/upload-lib.js
generated
7
lib/upload-lib.js
generated
@@ -293,7 +293,8 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
|
|||||||
if (Date.now() >
|
if (Date.now() >
|
||||||
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
||||||
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
||||||
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
// It's possible the analysis will eventually finish processing, but it's not worth spending more
|
||||||
|
// Actions time waiting.
|
||||||
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -329,7 +330,9 @@ async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
|
|||||||
else {
|
else {
|
||||||
util.assertNever(status);
|
util.assertNever(status);
|
||||||
}
|
}
|
||||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS, {
|
||||||
|
allowProcessExit: false,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
79
lib/util.js
generated
79
lib/util.js
generated
@@ -26,7 +26,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.parseMatrixInput = exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
exports.parseMatrixInput = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.supportExpectDiscardedCache = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
@@ -40,7 +40,6 @@ const apiCompatibility = __importStar(require("./api-compatibility.json"));
|
|||||||
const codeql_1 = require("./codeql");
|
const codeql_1 = require("./codeql");
|
||||||
const config_utils_1 = require("./config-utils");
|
const config_utils_1 = require("./config-utils");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
const languages_1 = require("./languages");
|
|
||||||
const shared_environment_1 = require("./shared-environment");
|
const shared_environment_1 = require("./shared-environment");
|
||||||
/**
|
/**
|
||||||
* Specifies bundle versions that are known to be broken
|
* Specifies bundle versions that are known to be broken
|
||||||
@@ -456,16 +455,33 @@ async function bundleDb(config, language, codeql, dbName) {
|
|||||||
return databaseBundlePath;
|
return databaseBundlePath;
|
||||||
}
|
}
|
||||||
exports.bundleDb = bundleDb;
|
exports.bundleDb = bundleDb;
|
||||||
async function delay(milliseconds) {
|
/**
|
||||||
// Immediately `unref` the timer such that it only prevents the process from exiting if the
|
* @param milliseconds time to delay
|
||||||
// surrounding promise is being awaited.
|
* @param opts options
|
||||||
return new Promise((resolve) => setTimeout(resolve, milliseconds).unref());
|
* @param opts.allowProcessExit if true, the timer will not prevent the process from exiting
|
||||||
|
*/
|
||||||
|
async function delay(milliseconds, { allowProcessExit }) {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const timer = setTimeout(resolve, milliseconds);
|
||||||
|
if (allowProcessExit) {
|
||||||
|
// Immediately `unref` the timer such that it only prevents the process from exiting if the
|
||||||
|
// surrounding promise is being awaited.
|
||||||
|
timer.unref();
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
exports.delay = delay;
|
exports.delay = delay;
|
||||||
function isGoodVersion(versionSpec) {
|
function isGoodVersion(versionSpec) {
|
||||||
return !BROKEN_VERSIONS.includes(versionSpec);
|
return !BROKEN_VERSIONS.includes(versionSpec);
|
||||||
}
|
}
|
||||||
exports.isGoodVersion = isGoodVersion;
|
exports.isGoodVersion = isGoodVersion;
|
||||||
|
/**
|
||||||
|
* Checks whether the CodeQL CLI supports the `--expect-discarded-cache` command-line flag.
|
||||||
|
*/
|
||||||
|
async function supportExpectDiscardedCache(codeQL) {
|
||||||
|
return codeQlVersionAbove(codeQL, "2.12.1");
|
||||||
|
}
|
||||||
|
exports.supportExpectDiscardedCache = supportExpectDiscardedCache;
|
||||||
exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
|
exports.ML_POWERED_JS_QUERIES_PACK_NAME = "codeql/javascript-experimental-atm-queries";
|
||||||
/**
|
/**
|
||||||
* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
* Gets the ML-powered JS query pack to add to the analysis if a repo is opted into the ML-powered
|
||||||
@@ -544,12 +560,12 @@ exports.isInTestMode = isInTestMode;
|
|||||||
* @returns true if the action should generate a conde-scanning config file
|
* @returns true if the action should generate a conde-scanning config file
|
||||||
* that gets passed to the CLI.
|
* that gets passed to the CLI.
|
||||||
*/
|
*/
|
||||||
async function useCodeScanningConfigInCli(codeql, featureEnablement) {
|
async function useCodeScanningConfigInCli(codeql, features) {
|
||||||
return await featureEnablement.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
|
return await features.getValue(feature_flags_1.Feature.CliConfigFileEnabled, codeql);
|
||||||
}
|
}
|
||||||
exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
|
exports.useCodeScanningConfigInCli = useCodeScanningConfigInCli;
|
||||||
async function logCodeScanningConfigInCli(codeql, featureEnablement, logger) {
|
async function logCodeScanningConfigInCli(codeql, features, logger) {
|
||||||
if (await useCodeScanningConfigInCli(codeql, featureEnablement)) {
|
if (await useCodeScanningConfigInCli(codeql, features)) {
|
||||||
logger.info("Code Scanning configuration file being processed in the codeql CLI.");
|
logger.info("Code Scanning configuration file being processed in the codeql CLI.");
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
@@ -637,7 +653,7 @@ async function withTimeout(timeoutMs, promise, onTimeout) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
const timeoutTask = async () => {
|
const timeoutTask = async () => {
|
||||||
await delay(timeoutMs);
|
await delay(timeoutMs, { allowProcessExit: true });
|
||||||
if (!finished) {
|
if (!finished) {
|
||||||
// Workaround: While the promise racing below will allow the main code
|
// Workaround: While the promise racing below will allow the main code
|
||||||
// to continue, the process won't normally exit until the asynchronous
|
// to continue, the process won't normally exit until the asynchronous
|
||||||
@@ -660,7 +676,7 @@ exports.withTimeout = withTimeout;
|
|||||||
async function checkForTimeout() {
|
async function checkForTimeout() {
|
||||||
if (hadTimeout === true) {
|
if (hadTimeout === true) {
|
||||||
core.info("A timeout occurred, force exiting the process after 30 seconds to prevent hanging.");
|
core.info("A timeout occurred, force exiting the process after 30 seconds to prevent hanging.");
|
||||||
await delay(30000);
|
await delay(30000, { allowProcessExit: true });
|
||||||
process.exit();
|
process.exit();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -685,45 +701,6 @@ function isHostedRunner() {
|
|||||||
process.env["RUNNER_TOOL_CACHE"]?.includes("hostedtoolcache"));
|
process.env["RUNNER_TOOL_CACHE"]?.includes("hostedtoolcache"));
|
||||||
}
|
}
|
||||||
exports.isHostedRunner = isHostedRunner;
|
exports.isHostedRunner = isHostedRunner;
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @param featuresEnablement The features enabled for the current run
|
|
||||||
* @param languagesInput Languages input from the workflow
|
|
||||||
* @param repository The owner/name of the repository
|
|
||||||
* @param logger A logger
|
|
||||||
* @returns A boolean indicating whether or not the toolcache should be bypassed and the latest codeql should be downloaded.
|
|
||||||
*/
|
|
||||||
async function shouldBypassToolcache(featuresEnablement, codeqlUrl, languagesInput, repository, logger) {
|
|
||||||
// An explicit codeql url is specified, that means the toolcache will not be used.
|
|
||||||
if (codeqlUrl) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
// Check if the toolcache is disabled for all languages
|
|
||||||
if (await featuresEnablement.getValue(feature_flags_1.Feature.BypassToolcacheEnabled)) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
// Check if the toolcache is disabled for kotlin and swift.
|
|
||||||
if (!(await featuresEnablement.getValue(feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled))) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
// Now check to see if kotlin or swift is one of the languages being analyzed.
|
|
||||||
const { rawLanguages, autodetected } = await (0, config_utils_1.getRawLanguages)(languagesInput, repository, logger);
|
|
||||||
let bypass = rawLanguages.some((lang) => languages_1.KOTLIN_SWIFT_BYPASS.includes(lang));
|
|
||||||
if (bypass) {
|
|
||||||
logger.info(`Bypassing toolcache for kotlin or swift. Languages: ${rawLanguages}`);
|
|
||||||
}
|
|
||||||
else if (!autodetected && rawLanguages.includes(languages_1.Language.java)) {
|
|
||||||
// special case: java was explicitly specified, but there might be
|
|
||||||
// some kotlin in the repository, so we need to make a request for that.
|
|
||||||
const langsInRepo = await (0, config_utils_1.getLanguagesInRepo)(repository, logger);
|
|
||||||
if (langsInRepo.includes("kotlin")) {
|
|
||||||
logger.info(`Bypassing toolcache for kotlin.`);
|
|
||||||
bypass = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return bypass;
|
|
||||||
}
|
|
||||||
exports.shouldBypassToolcache = shouldBypassToolcache;
|
|
||||||
function parseMatrixInput(matrixInput) {
|
function parseMatrixInput(matrixInput) {
|
||||||
if (matrixInput === undefined || matrixInput === "null") {
|
if (matrixInput === undefined || matrixInput === "null") {
|
||||||
return undefined;
|
return undefined;
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
115
lib/util.test.js
generated
115
lib/util.test.js
generated
@@ -33,9 +33,7 @@ const github = __importStar(require("@actions/github"));
|
|||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const feature_flags_1 = require("./feature-flags");
|
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
@@ -325,117 +323,4 @@ const shortTime = 10;
|
|||||||
t.deepEqual(shortTaskTimedOut, false);
|
t.deepEqual(shortTaskTimedOut, false);
|
||||||
t.deepEqual(result, 99);
|
t.deepEqual(result, 99);
|
||||||
});
|
});
|
||||||
const mockRepositoryNwo = (0, repository_1.parseRepositoryNwo)("owner/repo");
|
|
||||||
// eslint-disable-next-line github/array-foreach
|
|
||||||
[
|
|
||||||
{
|
|
||||||
name: "disabled",
|
|
||||||
features: [],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: undefined,
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: false,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "disabled even though swift kotlin bypassed",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: undefined,
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: false,
|
|
||||||
expectedApiCall: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "disabled even though swift kotlin analyzed",
|
|
||||||
features: [],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: " sWiFt , KoTlIn ",
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: false,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "toolcache bypass all",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: undefined,
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "custom CodeQL",
|
|
||||||
features: [],
|
|
||||||
hasCustomCodeQL: true,
|
|
||||||
languagesInput: undefined,
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bypass swift",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: " sWiFt ,other",
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bypass kotlin",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: "other, KoTlIn ",
|
|
||||||
languagesInRepository: [],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bypass kotlin language from repository",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: "",
|
|
||||||
languagesInRepository: ["KoTlIn", "other"],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bypass swift language from repository",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: "",
|
|
||||||
languagesInRepository: ["SwiFt", "other"],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "bypass java from input if there is kotlin in repository",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: "java",
|
|
||||||
languagesInRepository: ["kotlin", "other"],
|
|
||||||
expected: true,
|
|
||||||
expectedApiCall: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "don't bypass java from input if there is no kotlin in repository",
|
|
||||||
features: [feature_flags_1.Feature.BypassToolcacheKotlinSwiftEnabled],
|
|
||||||
hasCustomCodeQL: false,
|
|
||||||
languagesInput: "java",
|
|
||||||
languagesInRepository: ["java", "other"],
|
|
||||||
expected: false,
|
|
||||||
expectedApiCall: true,
|
|
||||||
},
|
|
||||||
].forEach((args) => {
|
|
||||||
(0, ava_1.default)(`shouldBypassToolcache: ${args.name}`, async (t) => {
|
|
||||||
const mockRequest = (0, testing_utils_1.mockLanguagesInRepo)(args.languagesInRepository);
|
|
||||||
const mockLogger = (0, logging_1.getRunnerLogger)(true);
|
|
||||||
const featureEnablement = (0, testing_utils_1.createFeatures)(args.features);
|
|
||||||
const codeqlUrl = args.hasCustomCodeQL ? "custom-codeql-url" : undefined;
|
|
||||||
const actual = await util.shouldBypassToolcache(featureEnablement, codeqlUrl, args.languagesInput, mockRepositoryNwo, mockLogger);
|
|
||||||
t.deepEqual(actual, args.expected);
|
|
||||||
t.deepEqual(mockRequest.called, args.expectedApiCall);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
//# sourceMappingURL=util.test.js.map
|
//# sourceMappingURL=util.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
198
node_modules/.package-lock.json
generated
vendored
198
node_modules/.package-lock.json
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.2.0",
|
"version": "2.2.7",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
@@ -142,16 +142,16 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@ava/typescript": {
|
"node_modules/@ava/typescript": {
|
||||||
"version": "3.0.1",
|
"version": "4.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/@ava/typescript/-/typescript-3.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/@ava/typescript/-/typescript-4.0.0.tgz",
|
||||||
"integrity": "sha512-/JXIUuKsvkaneaiA9ckk3ksFTqvu0mDNlChASrTe2BnDsvMbhQdPWyqQjJ9WRJWVhhs5TWn1/0Pp1G6Rv8Syrw==",
|
"integrity": "sha512-QFIPeqkEbdvn7Pob0wVeYpeZD0eXd8nDYdCl+knJVaIJrHdF2fXa58vFaig26cmYwnsEN0KRNTYJKbqW1B0lfg==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"escape-string-regexp": "^5.0.0",
|
"escape-string-regexp": "^5.0.0",
|
||||||
"execa": "^5.1.1"
|
"execa": "^7.1.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
|
"node": ">=14.19 <15 || >=16.15 <17 || >=18"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/@ava/typescript/node_modules/escape-string-regexp": {
|
"node_modules/@ava/typescript/node_modules/escape-string-regexp": {
|
||||||
@@ -3043,40 +3043,28 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/execa": {
|
"node_modules/execa": {
|
||||||
"version": "5.1.1",
|
"version": "7.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
|
"resolved": "https://registry.npmjs.org/execa/-/execa-7.1.0.tgz",
|
||||||
"integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
|
"integrity": "sha512-T6nIJO3LHxUZ6ahVRaxXz9WLEruXLqdcluA+UuTptXmLM7nDAn9lx9IfkxPyzEL21583qSt4RmL44pO71EHaJQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"cross-spawn": "^7.0.3",
|
"cross-spawn": "^7.0.3",
|
||||||
"get-stream": "^6.0.0",
|
"get-stream": "^6.0.1",
|
||||||
"human-signals": "^2.1.0",
|
"human-signals": "^4.3.0",
|
||||||
"is-stream": "^2.0.0",
|
"is-stream": "^3.0.0",
|
||||||
"merge-stream": "^2.0.0",
|
"merge-stream": "^2.0.0",
|
||||||
"npm-run-path": "^4.0.1",
|
"npm-run-path": "^5.1.0",
|
||||||
"onetime": "^5.1.2",
|
"onetime": "^6.0.0",
|
||||||
"signal-exit": "^3.0.3",
|
"signal-exit": "^3.0.7",
|
||||||
"strip-final-newline": "^2.0.0"
|
"strip-final-newline": "^3.0.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": "^14.18.0 || ^16.14.0 || >=18.0.0"
|
||||||
},
|
},
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sindresorhus/execa?sponsor=1"
|
"url": "https://github.com/sindresorhus/execa?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/execa/node_modules/get-stream": {
|
|
||||||
"version": "6.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
|
|
||||||
"integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
|
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/fast-deep-equal": {
|
"node_modules/fast-deep-equal": {
|
||||||
"version": "3.1.3",
|
"version": "3.1.3",
|
||||||
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
|
||||||
@@ -3328,6 +3316,18 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/get-stream": {
|
||||||
|
"version": "6.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
|
||||||
|
"integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
|
||||||
|
"dev": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=10"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/get-symbol-description": {
|
"node_modules/get-symbol-description": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz",
|
||||||
@@ -3354,19 +3354,17 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/glob": {
|
"node_modules/glob": {
|
||||||
"version": "8.0.1",
|
"version": "9.2.1",
|
||||||
"resolved": "https://registry.npmjs.org/glob/-/glob-8.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/glob/-/glob-9.2.1.tgz",
|
||||||
"integrity": "sha512-cF7FYZZ47YzmCu7dDy50xSRRfO3ErRfrXuLZcNIuyiJEco0XSrGtuilG19L5xp3NcwTx7Gn+X6Tv3fmsUPTbow==",
|
"integrity": "sha512-Pxxgq3W0HyA3XUvSXcFhRSs+43Jsx0ddxcFrbjxNGkL2Ak5BAUBxLqI5G6ADDeCHLfzzXFhe0b1yYcctGmytMA==",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"fs.realpath": "^1.0.0",
|
"fs.realpath": "^1.0.0",
|
||||||
"inflight": "^1.0.4",
|
"minimatch": "^7.4.1",
|
||||||
"inherits": "2",
|
"minipass": "^4.2.4",
|
||||||
"minimatch": "^5.0.1",
|
"path-scurry": "^1.6.1"
|
||||||
"once": "^1.3.0",
|
|
||||||
"path-is-absolute": "^1.0.0"
|
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12"
|
"node": ">=16 || 14 >=14.17"
|
||||||
},
|
},
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/isaacs"
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
@@ -3392,14 +3390,17 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/glob/node_modules/minimatch": {
|
"node_modules/glob/node_modules/minimatch": {
|
||||||
"version": "5.0.1",
|
"version": "7.4.2",
|
||||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz",
|
||||||
"integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==",
|
"integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"brace-expansion": "^2.0.1"
|
"brace-expansion": "^2.0.1"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": ">=10"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/globals": {
|
"node_modules/globals": {
|
||||||
@@ -3559,12 +3560,12 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/human-signals": {
|
"node_modules/human-signals": {
|
||||||
"version": "2.1.0",
|
"version": "4.3.0",
|
||||||
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
|
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.0.tgz",
|
||||||
"integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
|
"integrity": "sha512-zyzVyMjpGBX2+6cDVZeFPCdtOtdsxOeseRhB9tkQ6xXmGUNrcnBzdEKPy3VPNYz+4gy1oukVOXcrJCunSyc6QQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10.17.0"
|
"node": ">=14.18.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/ignore": {
|
"node_modules/ignore": {
|
||||||
@@ -3938,12 +3939,12 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/is-stream": {
|
"node_modules/is-stream": {
|
||||||
"version": "2.0.1",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz",
|
||||||
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
|
"integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||||
},
|
},
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
@@ -4340,18 +4341,6 @@
|
|||||||
"url": "https://github.com/sindresorhus/mem?sponsor=1"
|
"url": "https://github.com/sindresorhus/mem?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/mem/node_modules/mimic-fn": {
|
|
||||||
"version": "4.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz",
|
|
||||||
"integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==",
|
|
||||||
"dev": true,
|
|
||||||
"engines": {
|
|
||||||
"node": ">=12"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/merge-stream": {
|
"node_modules/merge-stream": {
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
|
||||||
@@ -4398,11 +4387,15 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/mimic-fn": {
|
"node_modules/mimic-fn": {
|
||||||
"version": "2.1.0",
|
"version": "4.0.0",
|
||||||
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
|
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/minimatch": {
|
"node_modules/minimatch": {
|
||||||
@@ -4425,6 +4418,14 @@
|
|||||||
"url": "https://github.com/sponsors/ljharb"
|
"url": "https://github.com/sponsors/ljharb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/minipass": {
|
||||||
|
"version": "4.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.4.tgz",
|
||||||
|
"integrity": "sha512-lwycX3cBMTvcejsHITUgYj6Gy6A7Nh4Q6h9NP4sTHY1ccJlC7yKzDmiShEHsJ16Jf1nKGDEaiHxiltsJEvk0nQ==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=8"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/ms": {
|
"node_modules/ms": {
|
||||||
"version": "2.1.2",
|
"version": "2.1.2",
|
||||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
|
||||||
@@ -4531,15 +4532,30 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/npm-run-path": {
|
"node_modules/npm-run-path": {
|
||||||
"version": "4.0.1",
|
"version": "5.1.0",
|
||||||
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
|
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz",
|
||||||
"integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
|
"integrity": "sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"path-key": "^3.0.0"
|
"path-key": "^4.0.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=8"
|
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/npm-run-path/node_modules/path-key": {
|
||||||
|
"version": "4.0.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz",
|
||||||
|
"integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==",
|
||||||
|
"dev": true,
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/object-inspect": {
|
"node_modules/object-inspect": {
|
||||||
@@ -4650,15 +4666,15 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/onetime": {
|
"node_modules/onetime": {
|
||||||
"version": "5.1.2",
|
"version": "6.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
|
"resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz",
|
||||||
"integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
|
"integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"mimic-fn": "^2.1.0"
|
"mimic-fn": "^4.0.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=12"
|
||||||
},
|
},
|
||||||
"funding": {
|
"funding": {
|
||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
@@ -4839,6 +4855,29 @@
|
|||||||
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
|
"integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
|
||||||
"dev": true
|
"dev": true
|
||||||
},
|
},
|
||||||
|
"node_modules/path-scurry": {
|
||||||
|
"version": "1.6.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.6.1.tgz",
|
||||||
|
"integrity": "sha512-OW+5s+7cw6253Q4E+8qQ/u1fVvcJQCJo/VFD8pje+dbJCF1n5ZRMV2AEHbGp+5Q7jxQIYJxkHopnj6nzdGeZLA==",
|
||||||
|
"dependencies": {
|
||||||
|
"lru-cache": "^7.14.1",
|
||||||
|
"minipass": "^4.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=14"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/isaacs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/path-scurry/node_modules/lru-cache": {
|
||||||
|
"version": "7.18.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
|
||||||
|
"integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/path-to-regexp": {
|
"node_modules/path-to-regexp": {
|
||||||
"version": "1.8.0",
|
"version": "1.8.0",
|
||||||
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
|
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz",
|
||||||
@@ -5472,12 +5511,15 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/strip-final-newline": {
|
"node_modules/strip-final-newline": {
|
||||||
"version": "2.0.0",
|
"version": "3.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz",
|
||||||
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
|
"integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==",
|
||||||
"dev": true,
|
"dev": true,
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=6"
|
"node": ">=12"
|
||||||
|
},
|
||||||
|
"funding": {
|
||||||
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/strip-json-comments": {
|
"node_modules/strip-json-comments": {
|
||||||
|
|||||||
16
node_modules/@ava/typescript/README.md
generated
vendored
16
node_modules/@ava/typescript/README.md
generated
vendored
@@ -1,21 +1,11 @@
|
|||||||
# @ava/typescript
|
# @ava/typescript
|
||||||
|
|
||||||
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA 4](https://avajs.dev).
|
Adds [TypeScript](https://www.typescriptlang.org/) support to [AVA](https://avajs.dev).
|
||||||
|
|
||||||
This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
|
This is designed to work for projects that precompile TypeScript. It allows AVA to load the compiled JavaScript, while configuring AVA to treat the TypeScript files as test files.
|
||||||
|
|
||||||
In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
|
In other words, say you have a test file at `src/test.ts`. You've configured TypeScript to output to `build/`. Using `@ava/typescript` you can run the test using `npx ava src/test.ts`.
|
||||||
|
|
||||||
## For AVA 3 users
|
|
||||||
|
|
||||||
Use version 2:
|
|
||||||
|
|
||||||
```console
|
|
||||||
npm install --save-dev @ava/typescript@2
|
|
||||||
```
|
|
||||||
|
|
||||||
Note that v2 does not support ES modules. This requires v3 and AVA 4.
|
|
||||||
|
|
||||||
## Enabling TypeScript support
|
## Enabling TypeScript support
|
||||||
|
|
||||||
Add this package to your project:
|
Add this package to your project:
|
||||||
@@ -47,7 +37,7 @@ You can enable compilation via the `compile` property. If `false`, AVA will assu
|
|||||||
|
|
||||||
Output files are expected to have the `.js` extension.
|
Output files are expected to have the `.js` extension.
|
||||||
|
|
||||||
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs` and `*.ts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
|
AVA searches your entire project for `*.js`, `*.cjs`, `*.mjs`, `*.ts`, `*.cts` and `*.mts` files (or other extensions you've configured). It will ignore such files found in the `rewritePaths` targets (e.g. `build/`). If you use more specific paths, for instance `build/main/`, you may need to change AVA's `files` configuration to ignore other directories.
|
||||||
|
|
||||||
## ES Modules
|
## ES Modules
|
||||||
|
|
||||||
@@ -75,6 +65,8 @@ You can configure AVA to recognize additional file extensions. To add (partial
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you use the [`allowJs` TypeScript option](https://www.typescriptlang.org/tsconfig/allowJs.html) you'll have to specify the `js`, `cjs` and `mjs` extensions for them to be rewritten.
|
||||||
|
|
||||||
See also AVA's [`extensions` option](https://github.com/avajs/ava/blob/master/docs/06-configuration.md#options).
|
See also AVA's [`extensions` option](https://github.com/avajs/ava/blob/master/docs/06-configuration.md#options).
|
||||||
|
|
||||||
† Note that the [*preserve* mode for JSX](https://www.typescriptlang.org/docs/handbook/jsx.html) is not (yet) supported.
|
† Note that the [*preserve* mode for JSX](https://www.typescriptlang.org/docs/handbook/jsx.html) is not (yet) supported.
|
||||||
|
|||||||
40
node_modules/@ava/typescript/index.js
generated
vendored
40
node_modules/@ava/typescript/index.js
generated
vendored
@@ -2,7 +2,7 @@ import fs from 'node:fs';
|
|||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import {pathToFileURL} from 'node:url';
|
import {pathToFileURL} from 'node:url';
|
||||||
import escapeStringRegexp from 'escape-string-regexp';
|
import escapeStringRegexp from 'escape-string-regexp';
|
||||||
import execa from 'execa';
|
import {execa} from 'execa';
|
||||||
|
|
||||||
const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
|
const pkg = JSON.parse(fs.readFileSync(new URL('package.json', import.meta.url)));
|
||||||
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
|
const help = `See https://github.com/avajs/typescript/blob/v${pkg.version}/README.md`;
|
||||||
@@ -83,7 +83,7 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
validate(config, configProperties);
|
validate(config, configProperties);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
extensions = ['ts'],
|
extensions = ['ts', 'cts', 'mts'],
|
||||||
rewritePaths: relativeRewritePaths,
|
rewritePaths: relativeRewritePaths,
|
||||||
compile,
|
compile,
|
||||||
} = config;
|
} = config;
|
||||||
@@ -118,7 +118,7 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
return rewritePaths.some(([from]) => filePath.startsWith(from));
|
return rewritePaths.some(([from]) => filePath.startsWith(from));
|
||||||
},
|
},
|
||||||
|
|
||||||
resolveTestFile(testfile) {
|
resolveTestFile(testfile) { // Used under AVA 3.2 protocol by legacy watcher implementation.
|
||||||
if (!testFileExtension.test(testfile)) {
|
if (!testFileExtension.test(testfile)) {
|
||||||
return testfile;
|
return testfile;
|
||||||
}
|
}
|
||||||
@@ -129,8 +129,14 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const [from, to] = rewrite;
|
const [from, to] = rewrite;
|
||||||
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
|
let newExtension = '.js';
|
||||||
return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, '.js');
|
if (testfile.endsWith('.cts')) {
|
||||||
|
newExtension = '.cjs';
|
||||||
|
} else if (testfile.endsWith('.mts')) {
|
||||||
|
newExtension = '.mjs';
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${to}${testfile.slice(from.length)}`.replace(testFileExtension, newExtension);
|
||||||
},
|
},
|
||||||
|
|
||||||
updateGlobs({filePatterns, ignoredByWatcherPatterns}) {
|
updateGlobs({filePatterns, ignoredByWatcherPatterns}) {
|
||||||
@@ -142,7 +148,11 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
],
|
],
|
||||||
ignoredByWatcherPatterns: [
|
ignoredByWatcherPatterns: [
|
||||||
...ignoredByWatcherPatterns,
|
...ignoredByWatcherPatterns,
|
||||||
...Object.values(relativeRewritePaths).map(to => `${to}**/*.js.map`),
|
...Object.values(relativeRewritePaths).flatMap(to => [
|
||||||
|
`${to}**/*.js.map`,
|
||||||
|
`${to}**/*.cjs.map`,
|
||||||
|
`${to}**/*.mjs.map`,
|
||||||
|
]),
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
@@ -150,7 +160,7 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
},
|
},
|
||||||
|
|
||||||
worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
|
worker({extensionsToLoadAsModules, state: {extensions, rewritePaths}}) {
|
||||||
const useImport = extensionsToLoadAsModules.includes('js');
|
const importJs = extensionsToLoadAsModules.includes('js');
|
||||||
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
|
const testFileExtension = new RegExp(`\\.(${extensions.map(ext => escapeStringRegexp(ext)).join('|')})$`);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
@@ -160,9 +170,19 @@ export default function typescriptProvider({negotiateProtocol}) {
|
|||||||
|
|
||||||
async load(ref, {requireFn}) {
|
async load(ref, {requireFn}) {
|
||||||
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
|
const [from, to] = rewritePaths.find(([from]) => ref.startsWith(from));
|
||||||
// TODO: Support JSX preserve mode — https://www.typescriptlang.org/docs/handbook/jsx.html
|
let rewritten = `${to}${ref.slice(from.length)}`;
|
||||||
const rewritten = `${to}${ref.slice(from.length)}`.replace(testFileExtension, '.js');
|
let useImport = true;
|
||||||
return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten); // eslint-disable-line node/no-unsupported-features/es-syntax
|
if (ref.endsWith('.cts')) {
|
||||||
|
rewritten = rewritten.replace(/\.cts$/, '.cjs');
|
||||||
|
useImport = false;
|
||||||
|
} else if (ref.endsWith('.mts')) {
|
||||||
|
rewritten = rewritten.replace(/\.mts$/, '.mjs');
|
||||||
|
} else {
|
||||||
|
rewritten = rewritten.replace(testFileExtension, '.js');
|
||||||
|
useImport = importJs;
|
||||||
|
}
|
||||||
|
|
||||||
|
return useImport ? import(pathToFileURL(rewritten)) : requireFn(rewritten);
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
|
|||||||
19
node_modules/@ava/typescript/package.json
generated
vendored
19
node_modules/@ava/typescript/package.json
generated
vendored
@@ -1,9 +1,9 @@
|
|||||||
{
|
{
|
||||||
"name": "@ava/typescript",
|
"name": "@ava/typescript",
|
||||||
"version": "3.0.1",
|
"version": "4.0.0",
|
||||||
"description": "TypeScript provider for AVA",
|
"description": "TypeScript provider for AVA",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=12.22 <13 || >=14.17 <15 || >=16.4 <17 || >=17"
|
"node": ">=14.19 <15 || >=16.15 <17 || >=18"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"index.js"
|
"index.js"
|
||||||
@@ -24,14 +24,14 @@
|
|||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"escape-string-regexp": "^5.0.0",
|
"escape-string-regexp": "^5.0.0",
|
||||||
"execa": "^5.1.1"
|
"execa": "^7.1.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"ava": "4.0.0-rc.1",
|
"ava": "^5.2.0",
|
||||||
"c8": "^7.10.0",
|
"c8": "^7.13.0",
|
||||||
"del": "^6.0.0",
|
"del": "^7.0.0",
|
||||||
"typescript": "^4.4.4",
|
"typescript": "^4.9.5",
|
||||||
"xo": "^0.46.3"
|
"xo": "^0.53.1"
|
||||||
},
|
},
|
||||||
"c8": {
|
"c8": {
|
||||||
"reporter": [
|
"reporter": [
|
||||||
@@ -52,7 +52,8 @@
|
|||||||
},
|
},
|
||||||
"xo": {
|
"xo": {
|
||||||
"ignores": [
|
"ignores": [
|
||||||
"test/broken-fixtures"
|
"test/broken-fixtures",
|
||||||
|
"test/fixtures/**/compiled/**"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
1436
node_modules/execa/index.d.ts
generated
vendored
1436
node_modules/execa/index.d.ts
generated
vendored
File diff suppressed because it is too large
Load Diff
119
node_modules/execa/index.js
generated
vendored
119
node_modules/execa/index.js
generated
vendored
@@ -1,16 +1,19 @@
|
|||||||
'use strict';
|
import {Buffer} from 'node:buffer';
|
||||||
const path = require('path');
|
import path from 'node:path';
|
||||||
const childProcess = require('child_process');
|
import childProcess from 'node:child_process';
|
||||||
const crossSpawn = require('cross-spawn');
|
import process from 'node:process';
|
||||||
const stripFinalNewline = require('strip-final-newline');
|
import crossSpawn from 'cross-spawn';
|
||||||
const npmRunPath = require('npm-run-path');
|
import stripFinalNewline from 'strip-final-newline';
|
||||||
const onetime = require('onetime');
|
import {npmRunPathEnv} from 'npm-run-path';
|
||||||
const makeError = require('./lib/error');
|
import onetime from 'onetime';
|
||||||
const normalizeStdio = require('./lib/stdio');
|
import {makeError} from './lib/error.js';
|
||||||
const {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} = require('./lib/kill');
|
import {normalizeStdio, normalizeStdioNode} from './lib/stdio.js';
|
||||||
const {handleInput, getSpawnedResult, makeAllStream, validateInputSync} = require('./lib/stream');
|
import {spawnedKill, spawnedCancel, setupTimeout, validateTimeout, setExitHandler} from './lib/kill.js';
|
||||||
const {mergePromise, getSpawnedPromise} = require('./lib/promise');
|
import {addPipeMethods} from './lib/pipe.js';
|
||||||
const {joinCommand, parseCommand, getEscapedCommand} = require('./lib/command');
|
import {handleInput, getSpawnedResult, makeAllStream, handleInputSync} from './lib/stream.js';
|
||||||
|
import {mergePromise, getSpawnedPromise} from './lib/promise.js';
|
||||||
|
import {joinCommand, parseCommand, parseTemplates, getEscapedCommand} from './lib/command.js';
|
||||||
|
import {logCommand, verboseDefault} from './lib/verbose.js';
|
||||||
|
|
||||||
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;
|
const DEFAULT_MAX_BUFFER = 1000 * 1000 * 100;
|
||||||
|
|
||||||
@@ -18,7 +21,7 @@ const getEnv = ({env: envOption, extendEnv, preferLocal, localDir, execPath}) =>
|
|||||||
const env = extendEnv ? {...process.env, ...envOption} : envOption;
|
const env = extendEnv ? {...process.env, ...envOption} : envOption;
|
||||||
|
|
||||||
if (preferLocal) {
|
if (preferLocal) {
|
||||||
return npmRunPath.env({env, cwd: localDir, execPath});
|
return npmRunPathEnv({env, cwd: localDir, execPath});
|
||||||
}
|
}
|
||||||
|
|
||||||
return env;
|
return env;
|
||||||
@@ -43,7 +46,8 @@ const handleArguments = (file, args, options = {}) => {
|
|||||||
cleanup: true,
|
cleanup: true,
|
||||||
all: false,
|
all: false,
|
||||||
windowsHide: true,
|
windowsHide: true,
|
||||||
...options
|
verbose: verboseDefault,
|
||||||
|
...options,
|
||||||
};
|
};
|
||||||
|
|
||||||
options.env = getEnv(options);
|
options.env = getEnv(options);
|
||||||
@@ -60,7 +64,7 @@ const handleArguments = (file, args, options = {}) => {
|
|||||||
|
|
||||||
const handleOutput = (options, value, error) => {
|
const handleOutput = (options, value, error) => {
|
||||||
if (typeof value !== 'string' && !Buffer.isBuffer(value)) {
|
if (typeof value !== 'string' && !Buffer.isBuffer(value)) {
|
||||||
// When `execa.sync()` errors, we normalize it to '' to mimic `execa()`
|
// When `execaSync()` errors, we normalize it to '' to mimic `execa()`
|
||||||
return error === undefined ? undefined : '';
|
return error === undefined ? undefined : '';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -71,10 +75,11 @@ const handleOutput = (options, value, error) => {
|
|||||||
return value;
|
return value;
|
||||||
};
|
};
|
||||||
|
|
||||||
const execa = (file, args, options) => {
|
export function execa(file, args, options) {
|
||||||
const parsed = handleArguments(file, args, options);
|
const parsed = handleArguments(file, args, options);
|
||||||
const command = joinCommand(file, args);
|
const command = joinCommand(file, args);
|
||||||
const escapedCommand = getEscapedCommand(file, args);
|
const escapedCommand = getEscapedCommand(file, args);
|
||||||
|
logCommand(escapedCommand, parsed.options);
|
||||||
|
|
||||||
validateTimeout(parsed.options);
|
validateTimeout(parsed.options);
|
||||||
|
|
||||||
@@ -94,9 +99,10 @@ const execa = (file, args, options) => {
|
|||||||
parsed,
|
parsed,
|
||||||
timedOut: false,
|
timedOut: false,
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
killed: false
|
killed: false,
|
||||||
}));
|
}));
|
||||||
return mergePromise(dummySpawned, errorPromise);
|
mergePromise(dummySpawned, errorPromise);
|
||||||
|
return dummySpawned;
|
||||||
}
|
}
|
||||||
|
|
||||||
const spawnedPromise = getSpawnedPromise(spawned);
|
const spawnedPromise = getSpawnedPromise(spawned);
|
||||||
@@ -126,8 +132,8 @@ const execa = (file, args, options) => {
|
|||||||
escapedCommand,
|
escapedCommand,
|
||||||
parsed,
|
parsed,
|
||||||
timedOut,
|
timedOut,
|
||||||
isCanceled: context.isCanceled,
|
isCanceled: context.isCanceled || (parsed.options.signal ? parsed.options.signal.aborted : false),
|
||||||
killed: spawned.killed
|
killed: spawned.killed,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!parsed.options.reject) {
|
if (!parsed.options.reject) {
|
||||||
@@ -147,31 +153,32 @@ const execa = (file, args, options) => {
|
|||||||
failed: false,
|
failed: false,
|
||||||
timedOut: false,
|
timedOut: false,
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
killed: false
|
killed: false,
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const handlePromiseOnce = onetime(handlePromise);
|
const handlePromiseOnce = onetime(handlePromise);
|
||||||
|
|
||||||
handleInput(spawned, parsed.options.input);
|
handleInput(spawned, parsed.options);
|
||||||
|
|
||||||
spawned.all = makeAllStream(spawned, parsed.options);
|
spawned.all = makeAllStream(spawned, parsed.options);
|
||||||
|
|
||||||
return mergePromise(spawned, handlePromiseOnce);
|
addPipeMethods(spawned);
|
||||||
};
|
mergePromise(spawned, handlePromiseOnce);
|
||||||
|
return spawned;
|
||||||
|
}
|
||||||
|
|
||||||
module.exports = execa;
|
export function execaSync(file, args, options) {
|
||||||
|
|
||||||
module.exports.sync = (file, args, options) => {
|
|
||||||
const parsed = handleArguments(file, args, options);
|
const parsed = handleArguments(file, args, options);
|
||||||
const command = joinCommand(file, args);
|
const command = joinCommand(file, args);
|
||||||
const escapedCommand = getEscapedCommand(file, args);
|
const escapedCommand = getEscapedCommand(file, args);
|
||||||
|
logCommand(escapedCommand, parsed.options);
|
||||||
|
|
||||||
validateInputSync(parsed.options);
|
const input = handleInputSync(parsed.options);
|
||||||
|
|
||||||
let result;
|
let result;
|
||||||
try {
|
try {
|
||||||
result = childProcess.spawnSync(parsed.file, parsed.args, parsed.options);
|
result = childProcess.spawnSync(parsed.file, parsed.args, {...parsed.options, input});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
throw makeError({
|
throw makeError({
|
||||||
error,
|
error,
|
||||||
@@ -183,7 +190,7 @@ module.exports.sync = (file, args, options) => {
|
|||||||
parsed,
|
parsed,
|
||||||
timedOut: false,
|
timedOut: false,
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
killed: false
|
killed: false,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -202,7 +209,7 @@ module.exports.sync = (file, args, options) => {
|
|||||||
parsed,
|
parsed,
|
||||||
timedOut: result.error && result.error.code === 'ETIMEDOUT',
|
timedOut: result.error && result.error.code === 'ETIMEDOUT',
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
killed: result.signal !== null
|
killed: result.signal !== null,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!parsed.options.reject) {
|
if (!parsed.options.reject) {
|
||||||
@@ -221,32 +228,52 @@ module.exports.sync = (file, args, options) => {
|
|||||||
failed: false,
|
failed: false,
|
||||||
timedOut: false,
|
timedOut: false,
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
killed: false
|
killed: false,
|
||||||
};
|
};
|
||||||
};
|
}
|
||||||
|
|
||||||
module.exports.command = (command, options) => {
|
function create$(options) {
|
||||||
|
function $(templatesOrOptions, ...expressions) {
|
||||||
|
if (Array.isArray(templatesOrOptions)) {
|
||||||
|
const [file, ...args] = parseTemplates(templatesOrOptions, expressions);
|
||||||
|
return execa(file, args, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
return create$({...options, ...templatesOrOptions});
|
||||||
|
}
|
||||||
|
|
||||||
|
$.sync = (templates, ...expressions) => {
|
||||||
|
const [file, ...args] = parseTemplates(templates, expressions);
|
||||||
|
return execaSync(file, args, options);
|
||||||
|
};
|
||||||
|
|
||||||
|
return $;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const $ = create$({preferLocal: true});
|
||||||
|
|
||||||
|
export function execaCommand(command, options) {
|
||||||
const [file, ...args] = parseCommand(command);
|
const [file, ...args] = parseCommand(command);
|
||||||
return execa(file, args, options);
|
return execa(file, args, options);
|
||||||
};
|
}
|
||||||
|
|
||||||
module.exports.commandSync = (command, options) => {
|
export function execaCommandSync(command, options) {
|
||||||
const [file, ...args] = parseCommand(command);
|
const [file, ...args] = parseCommand(command);
|
||||||
return execa.sync(file, args, options);
|
return execaSync(file, args, options);
|
||||||
};
|
}
|
||||||
|
|
||||||
module.exports.node = (scriptPath, args, options = {}) => {
|
export function execaNode(scriptPath, args, options = {}) {
|
||||||
if (args && !Array.isArray(args) && typeof args === 'object') {
|
if (args && !Array.isArray(args) && typeof args === 'object') {
|
||||||
options = args;
|
options = args;
|
||||||
args = [];
|
args = [];
|
||||||
}
|
}
|
||||||
|
|
||||||
const stdio = normalizeStdio.node(options);
|
const stdio = normalizeStdioNode(options);
|
||||||
const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));
|
const defaultExecArgv = process.execArgv.filter(arg => !arg.startsWith('--inspect'));
|
||||||
|
|
||||||
const {
|
const {
|
||||||
nodePath = process.execPath,
|
nodePath = process.execPath,
|
||||||
nodeOptions = defaultExecArgv
|
nodeOptions = defaultExecArgv,
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
return execa(
|
return execa(
|
||||||
@@ -254,7 +281,7 @@ module.exports.node = (scriptPath, args, options = {}) => {
|
|||||||
[
|
[
|
||||||
...nodeOptions,
|
...nodeOptions,
|
||||||
scriptPath,
|
scriptPath,
|
||||||
...(Array.isArray(args) ? args : [])
|
...(Array.isArray(args) ? args : []),
|
||||||
],
|
],
|
||||||
{
|
{
|
||||||
...options,
|
...options,
|
||||||
@@ -262,7 +289,7 @@ module.exports.node = (scriptPath, args, options = {}) => {
|
|||||||
stdout: undefined,
|
stdout: undefined,
|
||||||
stderr: undefined,
|
stderr: undefined,
|
||||||
stdio,
|
stdio,
|
||||||
shell: false
|
shell: false,
|
||||||
}
|
},
|
||||||
);
|
);
|
||||||
};
|
}
|
||||||
|
|||||||
70
node_modules/execa/lib/command.js
generated
vendored
70
node_modules/execa/lib/command.js
generated
vendored
@@ -1,4 +1,6 @@
|
|||||||
'use strict';
|
import {Buffer} from 'node:buffer';
|
||||||
|
import {ChildProcess} from 'node:child_process';
|
||||||
|
|
||||||
const normalizeArgs = (file, args = []) => {
|
const normalizeArgs = (file, args = []) => {
|
||||||
if (!Array.isArray(args)) {
|
if (!Array.isArray(args)) {
|
||||||
return [file];
|
return [file];
|
||||||
@@ -18,18 +20,14 @@ const escapeArg = arg => {
|
|||||||
return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`;
|
return `"${arg.replace(DOUBLE_QUOTES_REGEXP, '\\"')}"`;
|
||||||
};
|
};
|
||||||
|
|
||||||
const joinCommand = (file, args) => {
|
export const joinCommand = (file, args) => normalizeArgs(file, args).join(' ');
|
||||||
return normalizeArgs(file, args).join(' ');
|
|
||||||
};
|
|
||||||
|
|
||||||
const getEscapedCommand = (file, args) => {
|
export const getEscapedCommand = (file, args) => normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' ');
|
||||||
return normalizeArgs(file, args).map(arg => escapeArg(arg)).join(' ');
|
|
||||||
};
|
|
||||||
|
|
||||||
const SPACES_REGEXP = / +/g;
|
const SPACES_REGEXP = / +/g;
|
||||||
|
|
||||||
// Handle `execa.command()`
|
// Handle `execaCommand()`
|
||||||
const parseCommand = command => {
|
export const parseCommand = command => {
|
||||||
const tokens = [];
|
const tokens = [];
|
||||||
for (const token of command.trim().split(SPACES_REGEXP)) {
|
for (const token of command.trim().split(SPACES_REGEXP)) {
|
||||||
// Allow spaces to be escaped by a backslash if not meant as a delimiter
|
// Allow spaces to be escaped by a backslash if not meant as a delimiter
|
||||||
@@ -45,8 +43,54 @@ const parseCommand = command => {
|
|||||||
return tokens;
|
return tokens;
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = {
|
const parseExpression = expression => {
|
||||||
joinCommand,
|
const typeOfExpression = typeof expression;
|
||||||
getEscapedCommand,
|
|
||||||
parseCommand
|
if (typeOfExpression === 'string') {
|
||||||
|
return expression;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeOfExpression === 'number') {
|
||||||
|
return String(expression);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
typeOfExpression === 'object'
|
||||||
|
&& expression !== null
|
||||||
|
&& !(expression instanceof ChildProcess)
|
||||||
|
&& 'stdout' in expression
|
||||||
|
) {
|
||||||
|
const typeOfStdout = typeof expression.stdout;
|
||||||
|
|
||||||
|
if (typeOfStdout === 'string') {
|
||||||
|
return expression.stdout;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Buffer.isBuffer(expression.stdout)) {
|
||||||
|
return expression.stdout.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(`Unexpected "${typeOfStdout}" stdout in template expression`);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new TypeError(`Unexpected "${typeOfExpression}" in template expression`);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const parseTemplate = (template, index, templates, expressions) => {
|
||||||
|
const templateString = template ?? templates.raw[index];
|
||||||
|
const templateTokens = templateString.split(SPACES_REGEXP).filter(Boolean);
|
||||||
|
|
||||||
|
if (index === expressions.length) {
|
||||||
|
return templateTokens;
|
||||||
|
}
|
||||||
|
|
||||||
|
const expression = expressions[index];
|
||||||
|
|
||||||
|
return Array.isArray(expression)
|
||||||
|
? [...templateTokens, ...expression.map(expression => parseExpression(expression))]
|
||||||
|
: [...templateTokens, parseExpression(expression)];
|
||||||
|
};
|
||||||
|
|
||||||
|
export const parseTemplates = (templates, expressions) => templates.flatMap(
|
||||||
|
(template, index) => parseTemplate(template, index, templates, expressions),
|
||||||
|
);
|
||||||
|
|||||||
9
node_modules/execa/lib/error.js
generated
vendored
9
node_modules/execa/lib/error.js
generated
vendored
@@ -1,5 +1,4 @@
|
|||||||
'use strict';
|
import {signalsByName} from 'human-signals';
|
||||||
const {signalsByName} = require('human-signals');
|
|
||||||
|
|
||||||
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
|
const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription, exitCode, isCanceled}) => {
|
||||||
if (timedOut) {
|
if (timedOut) {
|
||||||
@@ -25,7 +24,7 @@ const getErrorPrefix = ({timedOut, timeout, errorCode, signal, signalDescription
|
|||||||
return 'failed';
|
return 'failed';
|
||||||
};
|
};
|
||||||
|
|
||||||
const makeError = ({
|
export const makeError = ({
|
||||||
stdout,
|
stdout,
|
||||||
stderr,
|
stderr,
|
||||||
all,
|
all,
|
||||||
@@ -37,7 +36,7 @@ const makeError = ({
|
|||||||
timedOut,
|
timedOut,
|
||||||
isCanceled,
|
isCanceled,
|
||||||
killed,
|
killed,
|
||||||
parsed: {options: {timeout}}
|
parsed: {options: {timeout}},
|
||||||
}) => {
|
}) => {
|
||||||
// `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
|
// `signal` and `exitCode` emitted on `spawned.on('exit')` event can be `null`.
|
||||||
// We normalize them to `undefined`
|
// We normalize them to `undefined`
|
||||||
@@ -84,5 +83,3 @@ const makeError = ({
|
|||||||
|
|
||||||
return error;
|
return error;
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = makeError;
|
|
||||||
|
|||||||
33
node_modules/execa/lib/kill.js
generated
vendored
33
node_modules/execa/lib/kill.js
generated
vendored
@@ -1,11 +1,10 @@
|
|||||||
'use strict';
|
import os from 'node:os';
|
||||||
const os = require('os');
|
import onExit from 'signal-exit';
|
||||||
const onExit = require('signal-exit');
|
|
||||||
|
|
||||||
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;
|
const DEFAULT_FORCE_KILL_TIMEOUT = 1000 * 5;
|
||||||
|
|
||||||
// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
|
// Monkey-patches `childProcess.kill()` to add `forceKillAfterTimeout` behavior
|
||||||
const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => {
|
export const spawnedKill = (kill, signal = 'SIGTERM', options = {}) => {
|
||||||
const killResult = kill(signal);
|
const killResult = kill(signal);
|
||||||
setKillTimeout(kill, signal, options, killResult);
|
setKillTimeout(kill, signal, options, killResult);
|
||||||
return killResult;
|
return killResult;
|
||||||
@@ -30,14 +29,10 @@ const setKillTimeout = (kill, signal, options, killResult) => {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => {
|
const shouldForceKill = (signal, {forceKillAfterTimeout}, killResult) => isSigterm(signal) && forceKillAfterTimeout !== false && killResult;
|
||||||
return isSigterm(signal) && forceKillAfterTimeout !== false && killResult;
|
|
||||||
};
|
|
||||||
|
|
||||||
const isSigterm = signal => {
|
const isSigterm = signal => signal === os.constants.signals.SIGTERM
|
||||||
return signal === os.constants.signals.SIGTERM ||
|
|| (typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');
|
||||||
(typeof signal === 'string' && signal.toUpperCase() === 'SIGTERM');
|
|
||||||
};
|
|
||||||
|
|
||||||
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
|
const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
|
||||||
if (forceKillAfterTimeout === true) {
|
if (forceKillAfterTimeout === true) {
|
||||||
@@ -52,7 +47,7 @@ const getForceKillAfterTimeout = ({forceKillAfterTimeout = true}) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// `childProcess.cancel()`
|
// `childProcess.cancel()`
|
||||||
const spawnedCancel = (spawned, context) => {
|
export const spawnedCancel = (spawned, context) => {
|
||||||
const killResult = spawned.kill();
|
const killResult = spawned.kill();
|
||||||
|
|
||||||
if (killResult) {
|
if (killResult) {
|
||||||
@@ -66,7 +61,7 @@ const timeoutKill = (spawned, signal, reject) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// `timeout` option handling
|
// `timeout` option handling
|
||||||
const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
|
export const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise) => {
|
||||||
if (timeout === 0 || timeout === undefined) {
|
if (timeout === 0 || timeout === undefined) {
|
||||||
return spawnedPromise;
|
return spawnedPromise;
|
||||||
}
|
}
|
||||||
@@ -85,14 +80,14 @@ const setupTimeout = (spawned, {timeout, killSignal = 'SIGTERM'}, spawnedPromise
|
|||||||
return Promise.race([timeoutPromise, safeSpawnedPromise]);
|
return Promise.race([timeoutPromise, safeSpawnedPromise]);
|
||||||
};
|
};
|
||||||
|
|
||||||
const validateTimeout = ({timeout}) => {
|
export const validateTimeout = ({timeout}) => {
|
||||||
if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) {
|
if (timeout !== undefined && (!Number.isFinite(timeout) || timeout < 0)) {
|
||||||
throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
|
throw new TypeError(`Expected the \`timeout\` option to be a non-negative integer, got \`${timeout}\` (${typeof timeout})`);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
// `cleanup` option handling
|
// `cleanup` option handling
|
||||||
const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
|
export const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
|
||||||
if (!cleanup || detached) {
|
if (!cleanup || detached) {
|
||||||
return timedPromise;
|
return timedPromise;
|
||||||
}
|
}
|
||||||
@@ -105,11 +100,3 @@ const setExitHandler = async (spawned, {cleanup, detached}, timedPromise) => {
|
|||||||
removeExitHandler();
|
removeExitHandler();
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
spawnedKill,
|
|
||||||
spawnedCancel,
|
|
||||||
setupTimeout,
|
|
||||||
validateTimeout,
|
|
||||||
setExitHandler
|
|
||||||
};
|
|
||||||
|
|||||||
42
node_modules/execa/lib/pipe.js
generated
vendored
Normal file
42
node_modules/execa/lib/pipe.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import {createWriteStream} from 'node:fs';
|
||||||
|
import {ChildProcess} from 'node:child_process';
|
||||||
|
import {isWritableStream} from 'is-stream';
|
||||||
|
|
||||||
|
const isExecaChildProcess = target => target instanceof ChildProcess && typeof target.then === 'function';
|
||||||
|
|
||||||
|
const pipeToTarget = (spawned, streamName, target) => {
|
||||||
|
if (typeof target === 'string') {
|
||||||
|
spawned[streamName].pipe(createWriteStream(target));
|
||||||
|
return spawned;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isWritableStream(target)) {
|
||||||
|
spawned[streamName].pipe(target);
|
||||||
|
return spawned;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isExecaChildProcess(target)) {
|
||||||
|
throw new TypeError('The second argument must be a string, a stream or an Execa child process.');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!isWritableStream(target.stdin)) {
|
||||||
|
throw new TypeError('The target child process\'s stdin must be available.');
|
||||||
|
}
|
||||||
|
|
||||||
|
spawned[streamName].pipe(target.stdin);
|
||||||
|
return target;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const addPipeMethods = spawned => {
|
||||||
|
if (spawned.stdout !== null) {
|
||||||
|
spawned.pipeStdout = pipeToTarget.bind(undefined, spawned, 'stdout');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (spawned.stderr !== null) {
|
||||||
|
spawned.pipeStderr = pipeToTarget.bind(undefined, spawned, 'stderr');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (spawned.all !== undefined) {
|
||||||
|
spawned.pipeAll = pipeToTarget.bind(undefined, spawned, 'all');
|
||||||
|
}
|
||||||
|
};
|
||||||
48
node_modules/execa/lib/promise.js
generated
vendored
48
node_modules/execa/lib/promise.js
generated
vendored
@@ -1,46 +1,36 @@
|
|||||||
'use strict';
|
// eslint-disable-next-line unicorn/prefer-top-level-await
|
||||||
|
|
||||||
const nativePromisePrototype = (async () => {})().constructor.prototype;
|
const nativePromisePrototype = (async () => {})().constructor.prototype;
|
||||||
|
|
||||||
const descriptors = ['then', 'catch', 'finally'].map(property => [
|
const descriptors = ['then', 'catch', 'finally'].map(property => [
|
||||||
property,
|
property,
|
||||||
Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property)
|
Reflect.getOwnPropertyDescriptor(nativePromisePrototype, property),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
// The return value is a mixin of `childProcess` and `Promise`
|
// The return value is a mixin of `childProcess` and `Promise`
|
||||||
const mergePromise = (spawned, promise) => {
|
export const mergePromise = (spawned, promise) => {
|
||||||
for (const [property, descriptor] of descriptors) {
|
for (const [property, descriptor] of descriptors) {
|
||||||
// Starting the main `promise` is deferred to avoid consuming streams
|
// Starting the main `promise` is deferred to avoid consuming streams
|
||||||
const value = typeof promise === 'function' ?
|
const value = typeof promise === 'function'
|
||||||
(...args) => Reflect.apply(descriptor.value, promise(), args) :
|
? (...args) => Reflect.apply(descriptor.value, promise(), args)
|
||||||
descriptor.value.bind(promise);
|
: descriptor.value.bind(promise);
|
||||||
|
|
||||||
Reflect.defineProperty(spawned, property, {...descriptor, value});
|
Reflect.defineProperty(spawned, property, {...descriptor, value});
|
||||||
}
|
}
|
||||||
|
|
||||||
return spawned;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Use promises instead of `child_process` events
|
// Use promises instead of `child_process` events
|
||||||
const getSpawnedPromise = spawned => {
|
export const getSpawnedPromise = spawned => new Promise((resolve, reject) => {
|
||||||
return new Promise((resolve, reject) => {
|
spawned.on('exit', (exitCode, signal) => {
|
||||||
spawned.on('exit', (exitCode, signal) => {
|
resolve({exitCode, signal});
|
||||||
resolve({exitCode, signal});
|
});
|
||||||
});
|
|
||||||
|
|
||||||
spawned.on('error', error => {
|
spawned.on('error', error => {
|
||||||
|
reject(error);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (spawned.stdin) {
|
||||||
|
spawned.stdin.on('error', error => {
|
||||||
reject(error);
|
reject(error);
|
||||||
});
|
});
|
||||||
|
}
|
||||||
if (spawned.stdin) {
|
});
|
||||||
spawned.stdin.on('error', error => {
|
|
||||||
reject(error);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
mergePromise,
|
|
||||||
getSpawnedPromise
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|||||||
7
node_modules/execa/lib/stdio.js
generated
vendored
7
node_modules/execa/lib/stdio.js
generated
vendored
@@ -1,9 +1,8 @@
|
|||||||
'use strict';
|
|
||||||
const aliases = ['stdin', 'stdout', 'stderr'];
|
const aliases = ['stdin', 'stdout', 'stderr'];
|
||||||
|
|
||||||
const hasAlias = options => aliases.some(alias => options[alias] !== undefined);
|
const hasAlias = options => aliases.some(alias => options[alias] !== undefined);
|
||||||
|
|
||||||
const normalizeStdio = options => {
|
export const normalizeStdio = options => {
|
||||||
if (!options) {
|
if (!options) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -30,10 +29,8 @@ const normalizeStdio = options => {
|
|||||||
return Array.from({length}, (value, index) => stdio[index]);
|
return Array.from({length}, (value, index) => stdio[index]);
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports = normalizeStdio;
|
|
||||||
|
|
||||||
// `ipc` is pushed unless it is already present
|
// `ipc` is pushed unless it is already present
|
||||||
module.exports.node = options => {
|
export const normalizeStdioNode = options => {
|
||||||
const stdio = normalizeStdio(options);
|
const stdio = normalizeStdio(options);
|
||||||
|
|
||||||
if (stdio === 'ipc') {
|
if (stdio === 'ipc') {
|
||||||
|
|||||||
76
node_modules/execa/lib/stream.js
generated
vendored
76
node_modules/execa/lib/stream.js
generated
vendored
@@ -1,13 +1,48 @@
|
|||||||
'use strict';
|
import {createReadStream, readFileSync} from 'node:fs';
|
||||||
const isStream = require('is-stream');
|
import {isStream} from 'is-stream';
|
||||||
const getStream = require('get-stream');
|
import getStream from 'get-stream';
|
||||||
const mergeStream = require('merge-stream');
|
import mergeStream from 'merge-stream';
|
||||||
|
|
||||||
// `input` option
|
const validateInputOptions = input => {
|
||||||
const handleInput = (spawned, input) => {
|
if (input !== undefined) {
|
||||||
// Checking for stdin is workaround for https://github.com/nodejs/node/issues/26852
|
throw new TypeError('The `input` and `inputFile` options cannot be both set.');
|
||||||
// @todo remove `|| spawned.stdin === undefined` once we drop support for Node.js <=12.2.0
|
}
|
||||||
if (input === undefined || spawned.stdin === undefined) {
|
};
|
||||||
|
|
||||||
|
const getInputSync = ({input, inputFile}) => {
|
||||||
|
if (typeof inputFile !== 'string') {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
validateInputOptions(input);
|
||||||
|
return readFileSync(inputFile);
|
||||||
|
};
|
||||||
|
|
||||||
|
// `input` and `inputFile` option in sync mode
|
||||||
|
export const handleInputSync = options => {
|
||||||
|
const input = getInputSync(options);
|
||||||
|
|
||||||
|
if (isStream(input)) {
|
||||||
|
throw new TypeError('The `input` option cannot be a stream in sync mode');
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getInput = ({input, inputFile}) => {
|
||||||
|
if (typeof inputFile !== 'string') {
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
validateInputOptions(input);
|
||||||
|
return createReadStream(inputFile);
|
||||||
|
};
|
||||||
|
|
||||||
|
// `input` and `inputFile` option in async mode
|
||||||
|
export const handleInput = (spawned, options) => {
|
||||||
|
const input = getInput(options);
|
||||||
|
|
||||||
|
if (input === undefined) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -19,7 +54,7 @@ const handleInput = (spawned, input) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// `all` interleaves `stdout` and `stderr`
|
// `all` interleaves `stdout` and `stderr`
|
||||||
const makeAllStream = (spawned, {all}) => {
|
export const makeAllStream = (spawned, {all}) => {
|
||||||
if (!all || (!spawned.stdout && !spawned.stderr)) {
|
if (!all || (!spawned.stdout && !spawned.stderr)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -39,7 +74,8 @@ const makeAllStream = (spawned, {all}) => {
|
|||||||
|
|
||||||
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
|
// On failure, `result.stdout|stderr|all` should contain the currently buffered stream
|
||||||
const getBufferedData = async (stream, streamPromise) => {
|
const getBufferedData = async (stream, streamPromise) => {
|
||||||
if (!stream) {
|
// When `buffer` is `false`, `streamPromise` is `undefined` and there is no buffered data to retrieve
|
||||||
|
if (!stream || streamPromise === undefined) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -65,7 +101,7 @@ const getStreamPromise = (stream, {encoding, buffer, maxBuffer}) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
|
// Retrieve result of child process: exit code, signal, error, streams (stdout/stderr/all)
|
||||||
const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
|
export const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuffer}, processDone) => {
|
||||||
const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
|
const stdoutPromise = getStreamPromise(stdout, {encoding, buffer, maxBuffer});
|
||||||
const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
|
const stderrPromise = getStreamPromise(stderr, {encoding, buffer, maxBuffer});
|
||||||
const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});
|
const allPromise = getStreamPromise(all, {encoding, buffer, maxBuffer: maxBuffer * 2});
|
||||||
@@ -77,21 +113,7 @@ const getSpawnedResult = async ({stdout, stderr, all}, {encoding, buffer, maxBuf
|
|||||||
{error, signal: error.signal, timedOut: error.timedOut},
|
{error, signal: error.signal, timedOut: error.timedOut},
|
||||||
getBufferedData(stdout, stdoutPromise),
|
getBufferedData(stdout, stdoutPromise),
|
||||||
getBufferedData(stderr, stderrPromise),
|
getBufferedData(stderr, stderrPromise),
|
||||||
getBufferedData(all, allPromise)
|
getBufferedData(all, allPromise),
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const validateInputSync = ({input}) => {
|
|
||||||
if (isStream(input)) {
|
|
||||||
throw new TypeError('The `input` option cannot be a stream in sync mode');
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
handleInput,
|
|
||||||
makeAllStream,
|
|
||||||
getSpawnedResult,
|
|
||||||
validateInputSync
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|||||||
19
node_modules/execa/lib/verbose.js
generated
vendored
Normal file
19
node_modules/execa/lib/verbose.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import {debuglog} from 'node:util';
|
||||||
|
import process from 'node:process';
|
||||||
|
|
||||||
|
export const verboseDefault = debuglog('execa').enabled;
|
||||||
|
|
||||||
|
const padField = (field, padding) => String(field).padStart(padding, '0');
|
||||||
|
|
||||||
|
const getTimestamp = () => {
|
||||||
|
const date = new Date();
|
||||||
|
return `${padField(date.getHours(), 2)}:${padField(date.getMinutes(), 2)}:${padField(date.getSeconds(), 2)}.${padField(date.getMilliseconds(), 3)}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const logCommand = (escapedCommand, {verbose}) => {
|
||||||
|
if (!verbose) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
process.stderr.write(`[${getTimestamp()}] ${escapedCommand}\n`);
|
||||||
|
};
|
||||||
50
node_modules/execa/package.json
generated
vendored
50
node_modules/execa/package.json
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "execa",
|
"name": "execa",
|
||||||
"version": "5.1.1",
|
"version": "7.1.0",
|
||||||
"description": "Process execution for humans",
|
"description": "Process execution for humans",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": "sindresorhus/execa",
|
"repository": "sindresorhus/execa",
|
||||||
@@ -10,11 +10,13 @@
|
|||||||
"email": "sindresorhus@gmail.com",
|
"email": "sindresorhus@gmail.com",
|
||||||
"url": "https://sindresorhus.com"
|
"url": "https://sindresorhus.com"
|
||||||
},
|
},
|
||||||
|
"type": "module",
|
||||||
|
"exports": "./index.js",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=10"
|
"node": "^14.18.0 || ^16.14.0 || >=18.0.0"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "xo && nyc ava && tsd"
|
"test": "xo && c8 ava && tsd"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"index.js",
|
"index.js",
|
||||||
@@ -40,27 +42,28 @@
|
|||||||
],
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"cross-spawn": "^7.0.3",
|
"cross-spawn": "^7.0.3",
|
||||||
"get-stream": "^6.0.0",
|
"get-stream": "^6.0.1",
|
||||||
"human-signals": "^2.1.0",
|
"human-signals": "^4.3.0",
|
||||||
"is-stream": "^2.0.0",
|
"is-stream": "^3.0.0",
|
||||||
"merge-stream": "^2.0.0",
|
"merge-stream": "^2.0.0",
|
||||||
"npm-run-path": "^4.0.1",
|
"npm-run-path": "^5.1.0",
|
||||||
"onetime": "^5.1.2",
|
"onetime": "^6.0.0",
|
||||||
"signal-exit": "^3.0.3",
|
"signal-exit": "^3.0.7",
|
||||||
"strip-final-newline": "^2.0.0"
|
"strip-final-newline": "^3.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^14.14.10",
|
"@types/node": "^18.13.0",
|
||||||
"ava": "^2.4.0",
|
"ava": "^5.2.0",
|
||||||
"get-node": "^11.0.1",
|
"c8": "^7.12.0",
|
||||||
|
"get-node": "^13.5.0",
|
||||||
"is-running": "^2.1.0",
|
"is-running": "^2.1.0",
|
||||||
"nyc": "^15.1.0",
|
"p-event": "^5.0.1",
|
||||||
"p-event": "^4.2.0",
|
"path-key": "^4.0.0",
|
||||||
"tempfile": "^3.0.0",
|
"tempfile": "^4.0.0",
|
||||||
"tsd": "^0.13.1",
|
"tsd": "^0.25.0",
|
||||||
"xo": "^0.35.0"
|
"xo": "^0.53.1"
|
||||||
},
|
},
|
||||||
"nyc": {
|
"c8": {
|
||||||
"reporter": [
|
"reporter": [
|
||||||
"text",
|
"text",
|
||||||
"lcov"
|
"lcov"
|
||||||
@@ -70,5 +73,14 @@
|
|||||||
"**/test.js",
|
"**/test.js",
|
||||||
"**/test/**"
|
"**/test/**"
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
"ava": {
|
||||||
|
"workerThreads": false
|
||||||
|
},
|
||||||
|
"xo": {
|
||||||
|
"rules": {
|
||||||
|
"unicorn/no-empty-file": "off",
|
||||||
|
"@typescript-eslint/ban-types": "off"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
441
node_modules/execa/readme.md
generated
vendored
441
node_modules/execa/readme.md
generated
vendored
@@ -1,4 +1,7 @@
|
|||||||
<img src="media/logo.svg" width="400">
|
<picture>
|
||||||
|
<source media="(prefers-color-scheme: dark)" srcset="media/logo_dark.svg">
|
||||||
|
<img alt="execa logo" src="media/logo.svg" width="400">
|
||||||
|
</picture>
|
||||||
<br>
|
<br>
|
||||||
|
|
||||||
[](https://codecov.io/gh/sindresorhus/execa)
|
[](https://codecov.io/gh/sindresorhus/execa)
|
||||||
@@ -9,123 +12,173 @@
|
|||||||
|
|
||||||
This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with:
|
This package improves [`child_process`](https://nodejs.org/api/child_process.html) methods with:
|
||||||
|
|
||||||
- Promise interface.
|
- [Promise interface](#execacommandcommand-options).
|
||||||
|
- [Scripts interface](#scripts-interface), like `zx`.
|
||||||
|
- Improved [Windows support](https://github.com/IndigoUnited/node-cross-spawn#why), including [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries.
|
||||||
|
- Executes [locally installed binaries](#preferlocal) without `npx`.
|
||||||
|
- [Cleans up](#cleanup) child processes when the parent process ends.
|
||||||
|
- [Graceful termination](#optionsforcekillaftertimeout).
|
||||||
|
- Get [interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal.
|
||||||
- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`.
|
- [Strips the final newline](#stripfinalnewline) from the output so you don't have to do `stdout.trim()`.
|
||||||
- Supports [shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) binaries cross-platform.
|
- Convenience methods to pipe processes' [input](#input) and [output](#redirect-output-to-a-file).
|
||||||
- [Improved Windows support.](https://github.com/IndigoUnited/node-cross-spawn#why)
|
- Can specify file and arguments [as a single string](#execacommandcommand-options) without a shell.
|
||||||
- Higher max buffer. 100 MB instead of 200 KB.
|
- [Verbose mode](#verbose-mode) for debugging.
|
||||||
- [Executes locally installed binaries by name.](#preferlocal)
|
|
||||||
- [Cleans up spawned processes when the parent process dies.](#cleanup)
|
|
||||||
- [Get interleaved output](#all) from `stdout` and `stderr` similar to what is printed on the terminal. [*(Async only)*](#execasyncfile-arguments-options)
|
|
||||||
- [Can specify file and arguments as a single string without a shell](#execacommandcommand-options)
|
|
||||||
- More descriptive errors.
|
- More descriptive errors.
|
||||||
|
- Higher max buffer: 100 MB instead of 1 MB.
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
```
|
```sh
|
||||||
$ npm install execa
|
npm install execa
|
||||||
```
|
```
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
```js
|
### Promise interface
|
||||||
const execa = require('execa');
|
|
||||||
|
|
||||||
(async () => {
|
```js
|
||||||
const {stdout} = await execa('echo', ['unicorns']);
|
import {execa} from 'execa';
|
||||||
console.log(stdout);
|
|
||||||
//=> 'unicorns'
|
const {stdout} = await execa('echo', ['unicorns']);
|
||||||
})();
|
console.log(stdout);
|
||||||
|
//=> 'unicorns'
|
||||||
```
|
```
|
||||||
|
|
||||||
### Pipe the child process stdout to the parent
|
### Scripts interface
|
||||||
|
|
||||||
|
For more information about Execa scripts, please see [this page](docs/scripts.md).
|
||||||
|
|
||||||
|
#### Basic
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const execa = require('execa');
|
import {$} from 'execa';
|
||||||
|
|
||||||
execa('echo', ['unicorns']).stdout.pipe(process.stdout);
|
const branch = await $`git branch --show-current`;
|
||||||
|
await $`dep deploy --branch=${branch}`;
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Multiple arguments
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {$} from 'execa';
|
||||||
|
|
||||||
|
const args = ['unicorns', '&', 'rainbows!'];
|
||||||
|
const {stdout} = await $`echo ${args}`;
|
||||||
|
console.log(stdout);
|
||||||
|
//=> 'unicorns & rainbows!'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### With options
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {$} from 'execa';
|
||||||
|
|
||||||
|
await $({stdio: 'inherit'})`echo unicorns`;
|
||||||
|
//=> 'unicorns'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Shared options
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {$} from 'execa';
|
||||||
|
|
||||||
|
const $$ = $({stdio: 'inherit'});
|
||||||
|
|
||||||
|
await $$`echo unicorns`;
|
||||||
|
//=> 'unicorns'
|
||||||
|
|
||||||
|
await $$`echo rainbows`;
|
||||||
|
//=> 'rainbows'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Verbose mode
|
||||||
|
|
||||||
|
```sh
|
||||||
|
> node file.js
|
||||||
|
unicorns
|
||||||
|
rainbows
|
||||||
|
|
||||||
|
> NODE_DEBUG=execa node file.js
|
||||||
|
[16:50:03.305] echo unicorns
|
||||||
|
unicorns
|
||||||
|
[16:50:03.308] echo rainbows
|
||||||
|
rainbows
|
||||||
|
```
|
||||||
|
|
||||||
|
### Input/output
|
||||||
|
|
||||||
|
#### Redirect output to a file
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {execa} from 'execa';
|
||||||
|
|
||||||
|
// Similar to `echo unicorns > stdout.txt` in Bash
|
||||||
|
await execa('echo', ['unicorns']).pipeStdout('stdout.txt');
|
||||||
|
|
||||||
|
// Similar to `echo unicorns 2> stdout.txt` in Bash
|
||||||
|
await execa('echo', ['unicorns']).pipeStderr('stderr.txt');
|
||||||
|
|
||||||
|
// Similar to `echo unicorns &> stdout.txt` in Bash
|
||||||
|
await execa('echo', ['unicorns'], {all: true}).pipeAll('all.txt');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Redirect input from a file
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {execa} from 'execa';
|
||||||
|
|
||||||
|
// Similar to `cat < stdin.txt` in Bash
|
||||||
|
const {stdout} = await execa('cat', {inputFile: 'stdin.txt'});
|
||||||
|
console.log(stdout);
|
||||||
|
//=> 'unicorns'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Save and pipe output from a child process
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {execa} from 'execa';
|
||||||
|
|
||||||
|
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(process.stdout);
|
||||||
|
// Prints `unicorns`
|
||||||
|
console.log(stdout);
|
||||||
|
// Also returns 'unicorns'
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Pipe multiple processes
|
||||||
|
|
||||||
|
```js
|
||||||
|
import {execa} from 'execa';
|
||||||
|
|
||||||
|
// Similar to `echo unicorns | cat` in Bash
|
||||||
|
const {stdout} = await execa('echo', ['unicorns']).pipeStdout(execa('cat'));
|
||||||
|
console.log(stdout);
|
||||||
|
//=> 'unicorns'
|
||||||
```
|
```
|
||||||
|
|
||||||
### Handling Errors
|
### Handling Errors
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const execa = require('execa');
|
import {execa} from 'execa';
|
||||||
|
|
||||||
(async () => {
|
// Catching an error
|
||||||
// Catching an error
|
|
||||||
try {
|
|
||||||
await execa('unknown', ['command']);
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error);
|
|
||||||
/*
|
|
||||||
{
|
|
||||||
message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
|
|
||||||
errno: -2,
|
|
||||||
code: 'ENOENT',
|
|
||||||
syscall: 'spawn unknown',
|
|
||||||
path: 'unknown',
|
|
||||||
spawnargs: ['command'],
|
|
||||||
originalMessage: 'spawn unknown ENOENT',
|
|
||||||
shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
|
|
||||||
command: 'unknown command',
|
|
||||||
escapedCommand: 'unknown command',
|
|
||||||
stdout: '',
|
|
||||||
stderr: '',
|
|
||||||
all: '',
|
|
||||||
failed: true,
|
|
||||||
timedOut: false,
|
|
||||||
isCanceled: false,
|
|
||||||
killed: false
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
}
|
|
||||||
|
|
||||||
})();
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cancelling a spawned process
|
|
||||||
|
|
||||||
```js
|
|
||||||
const execa = require('execa');
|
|
||||||
|
|
||||||
(async () => {
|
|
||||||
const subprocess = execa('node');
|
|
||||||
|
|
||||||
setTimeout(() => {
|
|
||||||
subprocess.cancel();
|
|
||||||
}, 1000);
|
|
||||||
|
|
||||||
try {
|
|
||||||
await subprocess;
|
|
||||||
} catch (error) {
|
|
||||||
console.log(subprocess.killed); // true
|
|
||||||
console.log(error.isCanceled); // true
|
|
||||||
}
|
|
||||||
})()
|
|
||||||
```
|
|
||||||
|
|
||||||
### Catching an error with the sync method
|
|
||||||
|
|
||||||
```js
|
|
||||||
try {
|
try {
|
||||||
execa.sync('unknown', ['command']);
|
await execa('unknown', ['command']);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.log(error);
|
console.log(error);
|
||||||
/*
|
/*
|
||||||
{
|
{
|
||||||
message: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
|
message: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
|
||||||
errno: -2,
|
errno: -2,
|
||||||
code: 'ENOENT',
|
code: 'ENOENT',
|
||||||
syscall: 'spawnSync unknown',
|
syscall: 'spawn unknown',
|
||||||
path: 'unknown',
|
path: 'unknown',
|
||||||
spawnargs: ['command'],
|
spawnargs: ['command'],
|
||||||
originalMessage: 'spawnSync unknown ENOENT',
|
originalMessage: 'spawn unknown ENOENT',
|
||||||
shortMessage: 'Command failed with ENOENT: unknown command spawnSync unknown ENOENT',
|
shortMessage: 'Command failed with ENOENT: unknown command spawn unknown ENOENT',
|
||||||
command: 'unknown command',
|
command: 'unknown command',
|
||||||
escapedCommand: 'unknown command',
|
escapedCommand: 'unknown command',
|
||||||
stdout: '',
|
stdout: '',
|
||||||
stderr: '',
|
stderr: '',
|
||||||
all: '',
|
|
||||||
failed: true,
|
failed: true,
|
||||||
timedOut: false,
|
timedOut: false,
|
||||||
isCanceled: false,
|
isCanceled: false,
|
||||||
@@ -135,7 +188,7 @@ try {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Kill a process
|
### Graceful termination
|
||||||
|
|
||||||
Using SIGTERM, and after 2 seconds, kill it with SIGKILL.
|
Using SIGTERM, and after 2 seconds, kill it with SIGKILL.
|
||||||
|
|
||||||
@@ -151,17 +204,84 @@ setTimeout(() => {
|
|||||||
|
|
||||||
## API
|
## API
|
||||||
|
|
||||||
### execa(file, arguments, options?)
|
### Methods
|
||||||
|
|
||||||
Execute a file. Think of this as a mix of [`child_process.execFile()`](https://nodejs.org/api/child_process.html#child_process_child_process_execfile_file_args_options_callback) and [`child_process.spawn()`](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options).
|
#### execa(file, arguments?, options?)
|
||||||
|
|
||||||
No escaping/quoting is needed.
|
Executes a command using `file ...arguments`. `arguments` are specified as an array of strings. Returns a [`childProcess`](#childprocess).
|
||||||
|
|
||||||
Unless the [`shell`](#shell) option is used, no shell interpreter (Bash, `cmd.exe`, etc.) is used, so shell features such as variables substitution (`echo $PATH`) are not allowed.
|
Arguments are [automatically escaped](#shell-syntax). They can contain any character, including spaces.
|
||||||
|
|
||||||
Returns a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) which:
|
This is the preferred method when executing single commands.
|
||||||
- is also a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult).
|
|
||||||
- exposes the following additional methods and properties.
|
#### execaNode(scriptPath, arguments?, options?)
|
||||||
|
|
||||||
|
Executes a Node.js file using `node scriptPath ...arguments`. `arguments` are specified as an array of strings. Returns a [`childProcess`](#childprocess).
|
||||||
|
|
||||||
|
Arguments are [automatically escaped](#shell-syntax). They can contain any character, including spaces.
|
||||||
|
|
||||||
|
This is the preferred method when executing Node.js files.
|
||||||
|
|
||||||
|
Like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options):
|
||||||
|
- the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options.
|
||||||
|
- the [`shell`](#shell) option cannot be used
|
||||||
|
- an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio)
|
||||||
|
|
||||||
|
#### $\`command\`
|
||||||
|
|
||||||
|
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a [`childProcess`](#childprocess).
|
||||||
|
|
||||||
|
Arguments are [automatically escaped](#shell-syntax). They can contain any character, but spaces must use `${}` like `` $`echo ${'has space'}` ``.
|
||||||
|
|
||||||
|
This is the preferred method when executing multiple commands in a script file.
|
||||||
|
|
||||||
|
The `command` string can inject any `${value}` with the following types: string, number, [`childProcess`](#childprocess) or an array of those types. For example: `` $`echo one ${'two'} ${3} ${['four', 'five']}` ``. For `${childProcess}`, the process's `stdout` is used.
|
||||||
|
|
||||||
|
For more information, please see [this section](#scripts-interface) and [this page](docs/scripts.md).
|
||||||
|
|
||||||
|
#### $(options)
|
||||||
|
|
||||||
|
Returns a new instance of [`$`](#command) but with different default `options`. Consecutive calls are merged to previous ones.
|
||||||
|
|
||||||
|
This can be used to either:
|
||||||
|
- Set options for a specific command: `` $(options)`command` ``
|
||||||
|
- Share options for multiple commands: `` const $$ = $(options); $$`command`; $$`otherCommand`; ``
|
||||||
|
|
||||||
|
#### execaCommand(command, options?)
|
||||||
|
|
||||||
|
Executes a command. The `command` string includes both the `file` and its `arguments`. Returns a [`childProcess`](#childprocess).
|
||||||
|
|
||||||
|
Arguments are [automatically escaped](#shell-syntax). They can contain any character, but spaces must be escaped with a backslash like `execaCommand('echo has\\ space')`.
|
||||||
|
|
||||||
|
This is the preferred method when executing a user-supplied `command` string, such as in a REPL.
|
||||||
|
|
||||||
|
### execaSync(file, arguments?, options?)
|
||||||
|
|
||||||
|
Same as [`execa()`](#execacommandcommand-options) but synchronous.
|
||||||
|
|
||||||
|
Returns or throws a [`childProcessResult`](#childProcessResult).
|
||||||
|
|
||||||
|
### $.sync\`command\`
|
||||||
|
|
||||||
|
Same as [$\`command\`](#command) but synchronous.
|
||||||
|
|
||||||
|
Returns or throws a [`childProcessResult`](#childProcessResult).
|
||||||
|
|
||||||
|
### execaCommandSync(command, options?)
|
||||||
|
|
||||||
|
Same as [`execaCommand()`](#execacommand-command-options) but synchronous.
|
||||||
|
|
||||||
|
Returns or throws a [`childProcessResult`](#childProcessResult).
|
||||||
|
|
||||||
|
### Shell syntax
|
||||||
|
|
||||||
|
For all the [methods above](#methods), no shell interpreter (Bash, cmd.exe, etc.) is used unless the [`shell` option](#shell) is set. This means shell-specific characters and expressions (`$variable`, `&&`, `||`, `;`, `|`, etc.) have no special meaning and do not need to be escaped.
|
||||||
|
|
||||||
|
### childProcess
|
||||||
|
|
||||||
|
The return value of all [asynchronous methods](#methods) is both:
|
||||||
|
- a `Promise` resolving or rejecting with a [`childProcessResult`](#childProcessResult).
|
||||||
|
- a [`child_process` instance](https://nodejs.org/api/child_process.html#child_process_class_childprocess) with the following additional methods and properties.
|
||||||
|
|
||||||
#### kill(signal?, options?)
|
#### kill(signal?, options?)
|
||||||
|
|
||||||
@@ -176,10 +296,6 @@ Milliseconds to wait for the child process to terminate before sending `SIGKILL`
|
|||||||
|
|
||||||
Can be disabled with `false`.
|
Can be disabled with `false`.
|
||||||
|
|
||||||
#### cancel()
|
|
||||||
|
|
||||||
Similar to [`childProcess.kill()`](https://nodejs.org/api/child_process.html#child_process_subprocess_kill_signal). This is preferred when cancelling the child process execution as the error is more descriptive and [`childProcessResult.isCanceled`](#iscanceled) is set to `true`.
|
|
||||||
|
|
||||||
#### all
|
#### all
|
||||||
|
|
||||||
Type: `ReadableStream | undefined`
|
Type: `ReadableStream | undefined`
|
||||||
@@ -190,34 +306,28 @@ This is `undefined` if either:
|
|||||||
- the [`all` option](#all-2) is `false` (the default value)
|
- the [`all` option](#all-2) is `false` (the default value)
|
||||||
- both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio)
|
- both [`stdout`](#stdout-1) and [`stderr`](#stderr-1) options are set to [`'inherit'`, `'ipc'`, `Stream` or `integer`](https://nodejs.org/dist/latest-v6.x/docs/api/child_process.html#child_process_options_stdio)
|
||||||
|
|
||||||
### execa.sync(file, arguments?, options?)
|
#### pipeStdout(target)
|
||||||
|
|
||||||
Execute a file synchronously.
|
[Pipe](https://nodejs.org/api/stream.html#readablepipedestination-options) the child process's `stdout` to `target`, which can be:
|
||||||
|
- Another [`execa()` return value](#pipe-multiple-processes)
|
||||||
|
- A [writable stream](#save-and-pipe-output-from-a-child-process)
|
||||||
|
- A [file path string](#redirect-output-to-a-file)
|
||||||
|
|
||||||
Returns or throws a [`childProcessResult`](#childProcessResult).
|
If the `target` is another [`execa()` return value](#execacommandcommand-options), it is returned. Otherwise, the original `execa()` return value is returned. This allows chaining `pipeStdout()` then `await`ing the [final result](#childprocessresult).
|
||||||
|
|
||||||
### execa.command(command, options?)
|
The [`stdout` option](#stdout-1) must be kept as `pipe`, its default value.
|
||||||
|
|
||||||
Same as [`execa()`](#execafile-arguments-options) except both file and arguments are specified in a single `command` string. For example, `execa('echo', ['unicorns'])` is the same as `execa.command('echo unicorns')`.
|
#### pipeStderr(target)
|
||||||
|
|
||||||
If the file or an argument contains spaces, they must be escaped with backslashes. This matters especially if `command` is not a constant but a variable, for example with `__dirname` or `process.cwd()`. Except for spaces, no escaping/quoting is needed.
|
Like [`pipeStdout()`](#pipestdouttarget) but piping the child process's `stderr` instead.
|
||||||
|
|
||||||
The [`shell` option](#shell) must be used if the `command` uses shell-specific features (for example, `&&` or `||`), as opposed to being a simple `file` followed by its `arguments`.
|
The [`stderr` option](#stderr-1) must be kept as `pipe`, its default value.
|
||||||
|
|
||||||
### execa.commandSync(command, options?)
|
#### pipeAll(target)
|
||||||
|
|
||||||
Same as [`execa.command()`](#execacommand-command-options) but synchronous.
|
Combines both [`pipeStdout()`](#pipestdouttarget) and [`pipeStderr()`](#pipestderrtarget).
|
||||||
|
|
||||||
Returns or throws a [`childProcessResult`](#childProcessResult).
|
Either the [`stdout` option](#stdout-1) or the [`stderr` option](#stderr-1) must be kept as `pipe`, their default value. Also, the [`all` option](#all-2) must be set to `true`.
|
||||||
|
|
||||||
### execa.node(scriptPath, arguments?, options?)
|
|
||||||
|
|
||||||
Execute a Node.js script as a child process.
|
|
||||||
|
|
||||||
Same as `execa('node', [scriptPath, ...arguments], options)` except (like [`child_process#fork()`](https://nodejs.org/api/child_process.html#child_process_child_process_fork_modulepath_args_options)):
|
|
||||||
- the current Node version and options are used. This can be overridden using the [`nodePath`](#nodepath-for-node-only) and [`nodeOptions`](#nodeoptions-for-node-only) options.
|
|
||||||
- the [`shell`](#shell) option cannot be used
|
|
||||||
- an extra channel [`ipc`](https://nodejs.org/api/child_process.html#child_process_options_stdio) is passed to [`stdio`](#stdio)
|
|
||||||
|
|
||||||
### childProcessResult
|
### childProcessResult
|
||||||
|
|
||||||
@@ -238,16 +348,16 @@ Type: `string`
|
|||||||
|
|
||||||
The file and arguments that were run, for logging purposes.
|
The file and arguments that were run, for logging purposes.
|
||||||
|
|
||||||
This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options).
|
This is not escaped and should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execaCommand()`](#execacommandcommand-options).
|
||||||
|
|
||||||
#### escapedCommand
|
#### escapedCommand
|
||||||
|
|
||||||
Type: `string`
|
Type: `string`
|
||||||
|
|
||||||
Same as [`command`](#command) but escaped.
|
Same as [`command`](#command-1) but escaped.
|
||||||
|
|
||||||
This is meant to be copy and pasted into a shell, for debugging purposes.
|
This is meant to be copy and pasted into a shell, for debugging purposes.
|
||||||
Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execa.command()`](#execacommandcommand-options).
|
Since the escaping is fairly basic, this should not be executed directly as a process, including using [`execa()`](#execafile-arguments-options) or [`execaCommand()`](#execacommandcommand-options).
|
||||||
|
|
||||||
#### exitCode
|
#### exitCode
|
||||||
|
|
||||||
@@ -275,7 +385,7 @@ The output of the process with `stdout` and `stderr` interleaved.
|
|||||||
|
|
||||||
This is `undefined` if either:
|
This is `undefined` if either:
|
||||||
- the [`all` option](#all-2) is `false` (the default value)
|
- the [`all` option](#all-2) is `false` (the default value)
|
||||||
- `execa.sync()` was used
|
- `execaSync()` was used
|
||||||
|
|
||||||
#### failed
|
#### failed
|
||||||
|
|
||||||
@@ -295,6 +405,8 @@ Type: `boolean`
|
|||||||
|
|
||||||
Whether the process was canceled.
|
Whether the process was canceled.
|
||||||
|
|
||||||
|
You can cancel the spawned process using the [`signal`](#signal-1) option.
|
||||||
|
|
||||||
#### killed
|
#### killed
|
||||||
|
|
||||||
Type: `boolean`
|
Type: `boolean`
|
||||||
@@ -355,14 +467,14 @@ Kill the spawned process when the parent process exits unless either:
|
|||||||
#### preferLocal
|
#### preferLocal
|
||||||
|
|
||||||
Type: `boolean`\
|
Type: `boolean`\
|
||||||
Default: `false`
|
Default: `true` with [`$`](#command)/[`$.sync`](#synccommand), `false` otherwise
|
||||||
|
|
||||||
Prefer locally installed binaries when looking for a binary to execute.\
|
Prefer locally installed binaries when looking for a binary to execute.\
|
||||||
If you `$ npm install foo`, you can then `execa('foo')`.
|
If you `$ npm install foo`, you can then `execa('foo')`.
|
||||||
|
|
||||||
#### localDir
|
#### localDir
|
||||||
|
|
||||||
Type: `string`\
|
Type: `string | URL`\
|
||||||
Default: `process.cwd()`
|
Default: `process.cwd()`
|
||||||
|
|
||||||
Preferred path to find locally installed binaries in (use with `preferLocal`).
|
Preferred path to find locally installed binaries in (use with `preferLocal`).
|
||||||
@@ -396,6 +508,16 @@ Type: `string | Buffer | stream.Readable`
|
|||||||
Write some input to the `stdin` of your binary.\
|
Write some input to the `stdin` of your binary.\
|
||||||
Streams are not allowed when using the synchronous methods.
|
Streams are not allowed when using the synchronous methods.
|
||||||
|
|
||||||
|
If the input is a file, use the [`inputFile` option](#inputfile) instead.
|
||||||
|
|
||||||
|
#### inputFile
|
||||||
|
|
||||||
|
Type: `string`
|
||||||
|
|
||||||
|
Use a file as input to the the `stdin` of your binary.
|
||||||
|
|
||||||
|
If the input is not a file, use the [`input` option](#input) instead.
|
||||||
|
|
||||||
#### stdin
|
#### stdin
|
||||||
|
|
||||||
Type: `string | number | Stream | undefined`\
|
Type: `string | number | Stream | undefined`\
|
||||||
@@ -451,7 +573,7 @@ Execa also accepts the below options which are the same as the options for [`chi
|
|||||||
|
|
||||||
#### cwd
|
#### cwd
|
||||||
|
|
||||||
Type: `string`\
|
Type: `string | URL`\
|
||||||
Default: `process.cwd()`
|
Default: `process.cwd()`
|
||||||
|
|
||||||
Current working directory of the child process.
|
Current working directory of the child process.
|
||||||
@@ -481,12 +603,10 @@ Child's [stdio](https://nodejs.org/api/child_process.html#child_process_options_
|
|||||||
Type: `string`\
|
Type: `string`\
|
||||||
Default: `'json'`
|
Default: `'json'`
|
||||||
|
|
||||||
Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execa.node()`](#execanodescriptpath-arguments-options):
|
Specify the kind of serialization used for sending messages between processes when using the [`stdio: 'ipc'`](#stdio) option or [`execaNode()`](#execanodescriptpath-arguments-options):
|
||||||
- `json`: Uses `JSON.stringify()` and `JSON.parse()`.
|
- `json`: Uses `JSON.stringify()` and `JSON.parse()`.
|
||||||
- `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value)
|
- `advanced`: Uses [`v8.serialize()`](https://nodejs.org/api/v8.html#v8_v8_serialize_value)
|
||||||
|
|
||||||
Requires Node.js `13.2.0` or later.
|
|
||||||
|
|
||||||
[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization)
|
[More info.](https://nodejs.org/api/child_process.html#child_process_advanced_serialization)
|
||||||
|
|
||||||
#### detached
|
#### detached
|
||||||
@@ -547,6 +667,16 @@ Default: `SIGTERM`
|
|||||||
|
|
||||||
Signal value to be used when the spawned process will be killed.
|
Signal value to be used when the spawned process will be killed.
|
||||||
|
|
||||||
|
#### signal
|
||||||
|
|
||||||
|
Type: [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
|
||||||
|
|
||||||
|
You can abort the spawned process using [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController).
|
||||||
|
|
||||||
|
When `AbortController.abort()` is called, [`.isCanceled`](#iscanceled) becomes `false`.
|
||||||
|
|
||||||
|
*Requires Node.js 16 or later.*
|
||||||
|
|
||||||
#### windowsVerbatimArguments
|
#### windowsVerbatimArguments
|
||||||
|
|
||||||
Type: `boolean`\
|
Type: `boolean`\
|
||||||
@@ -561,6 +691,15 @@ Default: `true`
|
|||||||
|
|
||||||
On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows.
|
On Windows, do not create a new console window. Please note this also prevents `CTRL-C` [from working](https://github.com/nodejs/node/issues/29837) on Windows.
|
||||||
|
|
||||||
|
#### verbose
|
||||||
|
|
||||||
|
Type: `boolean`\
|
||||||
|
Default: `false`
|
||||||
|
|
||||||
|
[Print each command](#verbose-mode) on `stderr` before executing it.
|
||||||
|
|
||||||
|
This can also be enabled by setting the `NODE_DEBUG=execa` environment variable in the current process.
|
||||||
|
|
||||||
#### nodePath *(For `.node()` only)*
|
#### nodePath *(For `.node()` only)*
|
||||||
|
|
||||||
Type: `string`\
|
Type: `string`\
|
||||||
@@ -582,59 +721,43 @@ List of [CLI options](https://nodejs.org/api/cli.html#cli_options) passed to the
|
|||||||
Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package:
|
Gracefully handle failures by using automatic retries and exponential backoff with the [`p-retry`](https://github.com/sindresorhus/p-retry) package:
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const pRetry = require('p-retry');
|
import pRetry from 'p-retry';
|
||||||
|
|
||||||
const run = async () => {
|
const run = async () => {
|
||||||
const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']);
|
const results = await execa('curl', ['-sSL', 'https://sindresorhus.com/unicorn']);
|
||||||
return results;
|
return results;
|
||||||
};
|
};
|
||||||
|
|
||||||
(async () => {
|
console.log(await pRetry(run, {retries: 5}));
|
||||||
console.log(await pRetry(run, {retries: 5}));
|
|
||||||
})();
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Save and pipe output from a child process
|
### Cancelling a spawned process
|
||||||
|
|
||||||
Let's say you want to show the output of a child process in real-time while also saving it to a variable.
|
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const execa = require('execa');
|
import {execa} from 'execa';
|
||||||
|
|
||||||
const subprocess = execa('echo', ['foo']);
|
const abortController = new AbortController();
|
||||||
subprocess.stdout.pipe(process.stdout);
|
const subprocess = execa('node', [], {signal: abortController.signal});
|
||||||
|
|
||||||
(async () => {
|
setTimeout(() => {
|
||||||
const {stdout} = await subprocess;
|
abortController.abort();
|
||||||
console.log('child output:', stdout);
|
}, 1000);
|
||||||
})();
|
|
||||||
```
|
|
||||||
|
|
||||||
### Redirect output to a file
|
try {
|
||||||
|
await subprocess;
|
||||||
```js
|
} catch (error) {
|
||||||
const execa = require('execa');
|
console.log(subprocess.killed); // true
|
||||||
|
console.log(error.isCanceled); // true
|
||||||
const subprocess = execa('echo', ['foo'])
|
}
|
||||||
subprocess.stdout.pipe(fs.createWriteStream('stdout.txt'))
|
|
||||||
```
|
|
||||||
|
|
||||||
### Redirect input from a file
|
|
||||||
|
|
||||||
```js
|
|
||||||
const execa = require('execa');
|
|
||||||
|
|
||||||
const subprocess = execa('cat')
|
|
||||||
fs.createReadStream('stdin.txt').pipe(subprocess.stdin)
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Execute the current package's binary
|
### Execute the current package's binary
|
||||||
|
|
||||||
```js
|
```js
|
||||||
const {getBinPathSync} = require('get-bin-path');
|
import {getBinPath} from 'get-bin-path';
|
||||||
|
|
||||||
const binPath = getBinPathSync();
|
const binPath = await getBinPath();
|
||||||
const subprocess = execa(binPath);
|
await execa(binPath);
|
||||||
```
|
```
|
||||||
|
|
||||||
`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up.
|
`execa` can be combined with [`get-bin-path`](https://github.com/ehmicky/get-bin-path) to test the current package's binary. As opposed to hard-coding the path to the binary, this validates that the `package.json` `bin` field is correctly set up.
|
||||||
|
|||||||
0
node_modules/execa/node_modules/get-stream/license → node_modules/get-stream/license
generated
vendored
0
node_modules/execa/node_modules/get-stream/license → node_modules/get-stream/license
generated
vendored
2
node_modules/glob/LICENSE
generated
vendored
2
node_modules/glob/LICENSE
generated
vendored
@@ -1,6 +1,6 @@
|
|||||||
The ISC License
|
The ISC License
|
||||||
|
|
||||||
Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors
|
Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
|||||||
1276
node_modules/glob/README.md
generated
vendored
1276
node_modules/glob/README.md
generated
vendored
File diff suppressed because it is too large
Load Diff
238
node_modules/glob/common.js
generated
vendored
238
node_modules/glob/common.js
generated
vendored
@@ -1,238 +0,0 @@
|
|||||||
exports.setopts = setopts
|
|
||||||
exports.ownProp = ownProp
|
|
||||||
exports.makeAbs = makeAbs
|
|
||||||
exports.finish = finish
|
|
||||||
exports.mark = mark
|
|
||||||
exports.isIgnored = isIgnored
|
|
||||||
exports.childrenIgnored = childrenIgnored
|
|
||||||
|
|
||||||
function ownProp (obj, field) {
|
|
||||||
return Object.prototype.hasOwnProperty.call(obj, field)
|
|
||||||
}
|
|
||||||
|
|
||||||
var fs = require("fs")
|
|
||||||
var path = require("path")
|
|
||||||
var minimatch = require("minimatch")
|
|
||||||
var isAbsolute = require("path-is-absolute")
|
|
||||||
var Minimatch = minimatch.Minimatch
|
|
||||||
|
|
||||||
function alphasort (a, b) {
|
|
||||||
return a.localeCompare(b, 'en')
|
|
||||||
}
|
|
||||||
|
|
||||||
function setupIgnores (self, options) {
|
|
||||||
self.ignore = options.ignore || []
|
|
||||||
|
|
||||||
if (!Array.isArray(self.ignore))
|
|
||||||
self.ignore = [self.ignore]
|
|
||||||
|
|
||||||
if (self.ignore.length) {
|
|
||||||
self.ignore = self.ignore.map(ignoreMap)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// ignore patterns are always in dot:true mode.
|
|
||||||
function ignoreMap (pattern) {
|
|
||||||
var gmatcher = null
|
|
||||||
if (pattern.slice(-3) === '/**') {
|
|
||||||
var gpattern = pattern.replace(/(\/\*\*)+$/, '')
|
|
||||||
gmatcher = new Minimatch(gpattern, { dot: true })
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
matcher: new Minimatch(pattern, { dot: true }),
|
|
||||||
gmatcher: gmatcher
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function setopts (self, pattern, options) {
|
|
||||||
if (!options)
|
|
||||||
options = {}
|
|
||||||
|
|
||||||
// base-matching: just use globstar for that.
|
|
||||||
if (options.matchBase && -1 === pattern.indexOf("/")) {
|
|
||||||
if (options.noglobstar) {
|
|
||||||
throw new Error("base matching requires globstar")
|
|
||||||
}
|
|
||||||
pattern = "**/" + pattern
|
|
||||||
}
|
|
||||||
|
|
||||||
self.silent = !!options.silent
|
|
||||||
self.pattern = pattern
|
|
||||||
self.strict = options.strict !== false
|
|
||||||
self.realpath = !!options.realpath
|
|
||||||
self.realpathCache = options.realpathCache || Object.create(null)
|
|
||||||
self.follow = !!options.follow
|
|
||||||
self.dot = !!options.dot
|
|
||||||
self.mark = !!options.mark
|
|
||||||
self.nodir = !!options.nodir
|
|
||||||
if (self.nodir)
|
|
||||||
self.mark = true
|
|
||||||
self.sync = !!options.sync
|
|
||||||
self.nounique = !!options.nounique
|
|
||||||
self.nonull = !!options.nonull
|
|
||||||
self.nosort = !!options.nosort
|
|
||||||
self.nocase = !!options.nocase
|
|
||||||
self.stat = !!options.stat
|
|
||||||
self.noprocess = !!options.noprocess
|
|
||||||
self.absolute = !!options.absolute
|
|
||||||
self.fs = options.fs || fs
|
|
||||||
|
|
||||||
self.maxLength = options.maxLength || Infinity
|
|
||||||
self.cache = options.cache || Object.create(null)
|
|
||||||
self.statCache = options.statCache || Object.create(null)
|
|
||||||
self.symlinks = options.symlinks || Object.create(null)
|
|
||||||
|
|
||||||
setupIgnores(self, options)
|
|
||||||
|
|
||||||
self.changedCwd = false
|
|
||||||
var cwd = process.cwd()
|
|
||||||
if (!ownProp(options, "cwd"))
|
|
||||||
self.cwd = cwd
|
|
||||||
else {
|
|
||||||
self.cwd = path.resolve(options.cwd)
|
|
||||||
self.changedCwd = self.cwd !== cwd
|
|
||||||
}
|
|
||||||
|
|
||||||
self.root = options.root || path.resolve(self.cwd, "/")
|
|
||||||
self.root = path.resolve(self.root)
|
|
||||||
if (process.platform === "win32")
|
|
||||||
self.root = self.root.replace(/\\/g, "/")
|
|
||||||
|
|
||||||
// TODO: is an absolute `cwd` supposed to be resolved against `root`?
|
|
||||||
// e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
|
|
||||||
self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
|
|
||||||
if (process.platform === "win32")
|
|
||||||
self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
|
|
||||||
self.nomount = !!options.nomount
|
|
||||||
|
|
||||||
// disable comments and negation in Minimatch.
|
|
||||||
// Note that they are not supported in Glob itself anyway.
|
|
||||||
options.nonegate = true
|
|
||||||
options.nocomment = true
|
|
||||||
// always treat \ in patterns as escapes, not path separators
|
|
||||||
options.allowWindowsEscape = true
|
|
||||||
|
|
||||||
self.minimatch = new Minimatch(pattern, options)
|
|
||||||
self.options = self.minimatch.options
|
|
||||||
}
|
|
||||||
|
|
||||||
function finish (self) {
|
|
||||||
var nou = self.nounique
|
|
||||||
var all = nou ? [] : Object.create(null)
|
|
||||||
|
|
||||||
for (var i = 0, l = self.matches.length; i < l; i ++) {
|
|
||||||
var matches = self.matches[i]
|
|
||||||
if (!matches || Object.keys(matches).length === 0) {
|
|
||||||
if (self.nonull) {
|
|
||||||
// do like the shell, and spit out the literal glob
|
|
||||||
var literal = self.minimatch.globSet[i]
|
|
||||||
if (nou)
|
|
||||||
all.push(literal)
|
|
||||||
else
|
|
||||||
all[literal] = true
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// had matches
|
|
||||||
var m = Object.keys(matches)
|
|
||||||
if (nou)
|
|
||||||
all.push.apply(all, m)
|
|
||||||
else
|
|
||||||
m.forEach(function (m) {
|
|
||||||
all[m] = true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!nou)
|
|
||||||
all = Object.keys(all)
|
|
||||||
|
|
||||||
if (!self.nosort)
|
|
||||||
all = all.sort(alphasort)
|
|
||||||
|
|
||||||
// at *some* point we statted all of these
|
|
||||||
if (self.mark) {
|
|
||||||
for (var i = 0; i < all.length; i++) {
|
|
||||||
all[i] = self._mark(all[i])
|
|
||||||
}
|
|
||||||
if (self.nodir) {
|
|
||||||
all = all.filter(function (e) {
|
|
||||||
var notDir = !(/\/$/.test(e))
|
|
||||||
var c = self.cache[e] || self.cache[makeAbs(self, e)]
|
|
||||||
if (notDir && c)
|
|
||||||
notDir = c !== 'DIR' && !Array.isArray(c)
|
|
||||||
return notDir
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (self.ignore.length)
|
|
||||||
all = all.filter(function(m) {
|
|
||||||
return !isIgnored(self, m)
|
|
||||||
})
|
|
||||||
|
|
||||||
self.found = all
|
|
||||||
}
|
|
||||||
|
|
||||||
function mark (self, p) {
|
|
||||||
var abs = makeAbs(self, p)
|
|
||||||
var c = self.cache[abs]
|
|
||||||
var m = p
|
|
||||||
if (c) {
|
|
||||||
var isDir = c === 'DIR' || Array.isArray(c)
|
|
||||||
var slash = p.slice(-1) === '/'
|
|
||||||
|
|
||||||
if (isDir && !slash)
|
|
||||||
m += '/'
|
|
||||||
else if (!isDir && slash)
|
|
||||||
m = m.slice(0, -1)
|
|
||||||
|
|
||||||
if (m !== p) {
|
|
||||||
var mabs = makeAbs(self, m)
|
|
||||||
self.statCache[mabs] = self.statCache[abs]
|
|
||||||
self.cache[mabs] = self.cache[abs]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
|
|
||||||
// lotta situps...
|
|
||||||
function makeAbs (self, f) {
|
|
||||||
var abs = f
|
|
||||||
if (f.charAt(0) === '/') {
|
|
||||||
abs = path.join(self.root, f)
|
|
||||||
} else if (isAbsolute(f) || f === '') {
|
|
||||||
abs = f
|
|
||||||
} else if (self.changedCwd) {
|
|
||||||
abs = path.resolve(self.cwd, f)
|
|
||||||
} else {
|
|
||||||
abs = path.resolve(f)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.platform === 'win32')
|
|
||||||
abs = abs.replace(/\\/g, '/')
|
|
||||||
|
|
||||||
return abs
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
|
|
||||||
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
|
|
||||||
function isIgnored (self, path) {
|
|
||||||
if (!self.ignore.length)
|
|
||||||
return false
|
|
||||||
|
|
||||||
return self.ignore.some(function(item) {
|
|
||||||
return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
function childrenIgnored (self, path) {
|
|
||||||
if (!self.ignore.length)
|
|
||||||
return false
|
|
||||||
|
|
||||||
return self.ignore.some(function(item) {
|
|
||||||
return !!(item.gmatcher && item.gmatcher.match(path))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
328
node_modules/glob/dist/cjs/glob.d.ts
generated
vendored
Normal file
328
node_modules/glob/dist/cjs/glob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,328 @@
|
|||||||
|
/// <reference types="node" />
|
||||||
|
import { Minimatch } from 'minimatch';
|
||||||
|
import Minipass from 'minipass';
|
||||||
|
import { FSOption, Path, PathScurry } from 'path-scurry';
|
||||||
|
import { IgnoreLike } from './ignore.js';
|
||||||
|
import { Pattern } from './pattern.js';
|
||||||
|
export type MatchSet = Minimatch['set'];
|
||||||
|
export type GlobParts = Exclude<Minimatch['globParts'], undefined>;
|
||||||
|
/**
|
||||||
|
* A `GlobOptions` object may be provided to any of the exported methods, and
|
||||||
|
* must be provided to the `Glob` constructor.
|
||||||
|
*
|
||||||
|
* All options are optional, boolean, and false by default, unless otherwise
|
||||||
|
* noted.
|
||||||
|
*
|
||||||
|
* All resolved options are added to the Glob object as properties.
|
||||||
|
*
|
||||||
|
* If you are running many `glob` operations, you can pass a Glob object as the
|
||||||
|
* `options` argument to a subsequent operation to share the previously loaded
|
||||||
|
* cache.
|
||||||
|
*/
|
||||||
|
export interface GlobOptions {
|
||||||
|
/**
|
||||||
|
* Set to `true` to always receive absolute paths for
|
||||||
|
* matched files. Set to `false` to always return relative paths.
|
||||||
|
*
|
||||||
|
* When this option is not set, absolute paths are returned for patterns
|
||||||
|
* that are absolute, and otherwise paths are returned that are relative
|
||||||
|
* to the `cwd` setting.
|
||||||
|
*
|
||||||
|
* This does _not_ make an extra system call to get
|
||||||
|
* the realpath, it only does string path resolution.
|
||||||
|
*
|
||||||
|
* Conflicts with {@link withFileTypes}
|
||||||
|
*/
|
||||||
|
absolute?: boolean;
|
||||||
|
/**
|
||||||
|
* Set to false to enable {@link windowsPathsNoEscape}
|
||||||
|
*
|
||||||
|
* @deprecated
|
||||||
|
*/
|
||||||
|
allowWindowsEscape?: boolean;
|
||||||
|
/**
|
||||||
|
* The current working directory in which to search. Defaults to
|
||||||
|
* `process.cwd()`.
|
||||||
|
*
|
||||||
|
* May be eiher a string path or a `file://` URL object or string.
|
||||||
|
*/
|
||||||
|
cwd?: string | URL;
|
||||||
|
/**
|
||||||
|
* Include `.dot` files in normal matches and `globstar`
|
||||||
|
* matches. Note that an explicit dot in a portion of the pattern
|
||||||
|
* will always match dot files.
|
||||||
|
*/
|
||||||
|
dot?: boolean;
|
||||||
|
/**
|
||||||
|
* Prepend all relative path strings with `./` (or `.\` on Windows).
|
||||||
|
*
|
||||||
|
* Without this option, returned relative paths are "bare", so instead of
|
||||||
|
* returning `'./foo/bar'`, they are returned as `'foo/bar'`.
|
||||||
|
*
|
||||||
|
* Relative patterns starting with `'../'` are not prepended with `./`, even
|
||||||
|
* if this option is set.
|
||||||
|
*/
|
||||||
|
dotRelative?: boolean;
|
||||||
|
/**
|
||||||
|
* Follow symlinked directories when expanding `**`
|
||||||
|
* patterns. This can result in a lot of duplicate references in
|
||||||
|
* the presence of cyclic links, and make performance quite bad.
|
||||||
|
*
|
||||||
|
* By default, a `**` in a pattern will follow 1 symbolic link if
|
||||||
|
* it is not the first item in the pattern, or none if it is the
|
||||||
|
* first item in the pattern, following the same behavior as Bash.
|
||||||
|
*/
|
||||||
|
follow?: boolean;
|
||||||
|
/**
|
||||||
|
* string or string[], or an object with `ignore` and `ignoreChildren`
|
||||||
|
* methods.
|
||||||
|
*
|
||||||
|
* If a string or string[] is provided, then this is treated as a glob
|
||||||
|
* pattern or array of glob patterns to exclude from matches. To ignore all
|
||||||
|
* children within a directory, as well as the entry itself, append `'/**'`
|
||||||
|
* to the ignore pattern.
|
||||||
|
*
|
||||||
|
* **Note** `ignore` patterns are _always_ in `dot:true` mode, regardless of
|
||||||
|
* any other settings.
|
||||||
|
*
|
||||||
|
* If an object is provided that has `ignored(path)` and/or
|
||||||
|
* `childrenIgnored(path)` methods, then these methods will be called to
|
||||||
|
* determine whether any Path is a match or if its children should be
|
||||||
|
* traversed, respectively.
|
||||||
|
*/
|
||||||
|
ignore?: string | string[] | IgnoreLike;
|
||||||
|
/**
|
||||||
|
* Treat brace expansion like `{a,b}` as a "magic" pattern. Has no
|
||||||
|
* effect if {@link nobrace} is set.
|
||||||
|
*
|
||||||
|
* Only has effect on the {@link hasMagic} function.
|
||||||
|
*/
|
||||||
|
magicalBraces?: boolean;
|
||||||
|
/**
|
||||||
|
* Add a `/` character to directory matches. Note that this requires
|
||||||
|
* additional stat calls in some cases.
|
||||||
|
*/
|
||||||
|
mark?: boolean;
|
||||||
|
/**
|
||||||
|
* Perform a basename-only match if the pattern does not contain any slash
|
||||||
|
* characters. That is, `*.js` would be treated as equivalent to
|
||||||
|
* `**\/*.js`, matching all js files in all directories.
|
||||||
|
*/
|
||||||
|
matchBase?: boolean;
|
||||||
|
/**
|
||||||
|
* Limit the directory traversal to a given depth below the cwd.
|
||||||
|
* Note that this does NOT prevent traversal to sibling folders,
|
||||||
|
* root patterns, and so on. It only limits the maximum folder depth
|
||||||
|
* that the walk will descend, relative to the cwd.
|
||||||
|
*/
|
||||||
|
maxDepth?: number;
|
||||||
|
/**
|
||||||
|
* Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||||
|
*/
|
||||||
|
nobrace?: boolean;
|
||||||
|
/**
|
||||||
|
* Perform a case-insensitive match. This defaults to `true` on macOS and
|
||||||
|
* Windows systems, and `false` on all others.
|
||||||
|
*
|
||||||
|
* **Note** `nocase` should only be explicitly set when it is
|
||||||
|
* known that the filesystem's case sensitivity differs from the
|
||||||
|
* platform default. If set `true` on case-sensitive file
|
||||||
|
* systems, or `false` on case-insensitive file systems, then the
|
||||||
|
* walk may return more or less results than expected.
|
||||||
|
*/
|
||||||
|
nocase?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match directories, only files. (Note: to match
|
||||||
|
* _only_ directories, put a `/` at the end of the pattern.)
|
||||||
|
*/
|
||||||
|
nodir?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match "extglob" patterns such as `+(a|b)`.
|
||||||
|
*/
|
||||||
|
noext?: boolean;
|
||||||
|
/**
|
||||||
|
* Do not match `**` against multiple filenames. (Ie, treat it as a normal
|
||||||
|
* `*` instead.)
|
||||||
|
*
|
||||||
|
* Conflicts with {@link matchBase}
|
||||||
|
*/
|
||||||
|
noglobstar?: boolean;
|
||||||
|
/**
|
||||||
|
* Defaults to value of `process.platform` if available, or `'linux'` if
|
||||||
|
* not. Setting `platform:'win32'` on non-Windows systems may cause strange
|
||||||
|
* behavior.
|
||||||
|
*/
|
||||||
|
platform?: NodeJS.Platform;
|
||||||
|
/**
|
||||||
|
* Set to true to call `fs.realpath` on all of the
|
||||||
|
* results. In the case of an entry that cannot be resolved, the
|
||||||
|
* entry is omitted. This incurs a slight performance penalty, of
|
||||||
|
* course, because of the added system calls.
|
||||||
|
*/
|
||||||
|
realpath?: boolean;
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* A string path resolved against the `cwd` option, which
|
||||||
|
* is used as the starting point for absolute patterns that start
|
||||||
|
* with `/`, (but not drive letters or UNC paths on Windows).
|
||||||
|
*
|
||||||
|
* Note that this _doesn't_ necessarily limit the walk to the
|
||||||
|
* `root` directory, and doesn't affect the cwd starting point for
|
||||||
|
* non-absolute patterns. A pattern containing `..` will still be
|
||||||
|
* able to traverse out of the root directory, if it is not an
|
||||||
|
* actual root directory on the filesystem, and any non-absolute
|
||||||
|
* patterns will be matched in the `cwd`. For example, the
|
||||||
|
* pattern `/../*` with `{root:'/some/path'}` will return all
|
||||||
|
* files in `/some`, not all files in `/some/path`. The pattern
|
||||||
|
* `*` with `{root:'/some/path'}` will return all the entries in
|
||||||
|
* the cwd, not the entries in `/some/path`.
|
||||||
|
*
|
||||||
|
* To start absolute and non-absolute patterns in the same
|
||||||
|
* path, you can use `{root:''}`. However, be aware that on
|
||||||
|
* Windows systems, a pattern like `x:/*` or `//host/share/*` will
|
||||||
|
* _always_ start in the `x:/` or `//host/share` directory,
|
||||||
|
* regardless of the `root` setting.
|
||||||
|
*/
|
||||||
|
root?: string;
|
||||||
|
/**
|
||||||
|
* A [PathScurry](http://npm.im/path-scurry) object used
|
||||||
|
* to traverse the file system. If the `nocase` option is set
|
||||||
|
* explicitly, then any provided `scurry` object must match this
|
||||||
|
* setting.
|
||||||
|
*/
|
||||||
|
scurry?: PathScurry;
|
||||||
|
/**
|
||||||
|
* Call `lstat()` on all entries, whether required or not to determine
|
||||||
|
* whether it's a valid match. When used with {@link withFileTypes}, this
|
||||||
|
* means that matches will include data such as modified time, permissions,
|
||||||
|
* and so on. Note that this will incur a performance cost due to the added
|
||||||
|
* system calls.
|
||||||
|
*/
|
||||||
|
stat?: boolean;
|
||||||
|
/**
|
||||||
|
* An AbortSignal which will cancel the Glob walk when
|
||||||
|
* triggered.
|
||||||
|
*/
|
||||||
|
signal?: AbortSignal;
|
||||||
|
/**
|
||||||
|
* Use `\\` as a path separator _only_, and
|
||||||
|
* _never_ as an escape character. If set, all `\\` characters are
|
||||||
|
* replaced with `/` in the pattern.
|
||||||
|
*
|
||||||
|
* Note that this makes it **impossible** to match against paths
|
||||||
|
* containing literal glob pattern characters, but allows matching
|
||||||
|
* with patterns constructed using `path.join()` and
|
||||||
|
* `path.resolve()` on Windows platforms, mimicking the (buggy!)
|
||||||
|
* behavior of Glob v7 and before on Windows. Please use with
|
||||||
|
* caution, and be mindful of [the caveat below about Windows
|
||||||
|
* paths](#windows). (For legacy reasons, this is also set if
|
||||||
|
* `allowWindowsEscape` is set to the exact value `false`.)
|
||||||
|
*/
|
||||||
|
windowsPathsNoEscape?: boolean;
|
||||||
|
/**
|
||||||
|
* Return [PathScurry](http://npm.im/path-scurry)
|
||||||
|
* `Path` objects instead of strings. These are similar to a
|
||||||
|
* NodeJS `Dirent` object, but with additional methods and
|
||||||
|
* properties.
|
||||||
|
*
|
||||||
|
* Conflicts with {@link absolute}
|
||||||
|
*/
|
||||||
|
withFileTypes?: boolean;
|
||||||
|
/**
|
||||||
|
* An fs implementation to override some or all of the defaults. See
|
||||||
|
* http://npm.im/path-scurry for details about what can be overridden.
|
||||||
|
*/
|
||||||
|
fs?: FSOption;
|
||||||
|
}
|
||||||
|
export type GlobOptionsWithFileTypesTrue = GlobOptions & {
|
||||||
|
withFileTypes: true;
|
||||||
|
absolute?: undefined;
|
||||||
|
};
|
||||||
|
export type GlobOptionsWithFileTypesFalse = GlobOptions & {
|
||||||
|
withFileTypes?: false;
|
||||||
|
};
|
||||||
|
export type GlobOptionsWithFileTypesUnset = GlobOptions & {
|
||||||
|
withFileTypes?: undefined;
|
||||||
|
};
|
||||||
|
export type Result<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? Path : Opts extends GlobOptionsWithFileTypesFalse ? string : Opts extends GlobOptionsWithFileTypesUnset ? string : string | Path;
|
||||||
|
export type Results<Opts> = Result<Opts>[];
|
||||||
|
export type FileTypes<Opts> = Opts extends GlobOptionsWithFileTypesTrue ? true : Opts extends GlobOptionsWithFileTypesFalse ? false : Opts extends GlobOptionsWithFileTypesUnset ? false : boolean;
|
||||||
|
/**
|
||||||
|
* An object that can perform glob pattern traversals.
|
||||||
|
*/
|
||||||
|
export declare class Glob<Opts extends GlobOptions> implements GlobOptions {
|
||||||
|
absolute?: boolean;
|
||||||
|
cwd: string;
|
||||||
|
root?: string;
|
||||||
|
dot: boolean;
|
||||||
|
dotRelative: boolean;
|
||||||
|
follow: boolean;
|
||||||
|
ignore?: string | string[] | IgnoreLike;
|
||||||
|
magicalBraces: boolean;
|
||||||
|
mark?: boolean;
|
||||||
|
matchBase: boolean;
|
||||||
|
maxDepth: number;
|
||||||
|
nobrace: boolean;
|
||||||
|
nocase: boolean;
|
||||||
|
nodir: boolean;
|
||||||
|
noext: boolean;
|
||||||
|
noglobstar: boolean;
|
||||||
|
pattern: string[];
|
||||||
|
platform: NodeJS.Platform;
|
||||||
|
realpath: boolean;
|
||||||
|
scurry: PathScurry;
|
||||||
|
stat: boolean;
|
||||||
|
signal?: AbortSignal;
|
||||||
|
windowsPathsNoEscape: boolean;
|
||||||
|
withFileTypes: FileTypes<Opts>;
|
||||||
|
/**
|
||||||
|
* The options provided to the constructor.
|
||||||
|
*/
|
||||||
|
opts: Opts;
|
||||||
|
/**
|
||||||
|
* An array of parsed immutable {@link Pattern} objects.
|
||||||
|
*/
|
||||||
|
patterns: Pattern[];
|
||||||
|
/**
|
||||||
|
* All options are stored as properties on the `Glob` object.
|
||||||
|
*
|
||||||
|
* See {@link GlobOptions} for full options descriptions.
|
||||||
|
*
|
||||||
|
* Note that a previous `Glob` object can be passed as the
|
||||||
|
* `GlobOptions` to another `Glob` instantiation to re-use settings
|
||||||
|
* and caches with a new pattern.
|
||||||
|
*
|
||||||
|
* Traversal functions can be called multiple times to run the walk
|
||||||
|
* again.
|
||||||
|
*/
|
||||||
|
constructor(pattern: string | string[], opts: Opts);
|
||||||
|
/**
|
||||||
|
* Returns a Promise that resolves to the results array.
|
||||||
|
*/
|
||||||
|
walk(): Promise<Results<Opts>>;
|
||||||
|
/**
|
||||||
|
* synchronous {@link Glob.walk}
|
||||||
|
*/
|
||||||
|
walkSync(): Results<Opts>;
|
||||||
|
/**
|
||||||
|
* Stream results asynchronously.
|
||||||
|
*/
|
||||||
|
stream(): Minipass<Result<Opts>, Result<Opts>>;
|
||||||
|
/**
|
||||||
|
* Stream results synchronously.
|
||||||
|
*/
|
||||||
|
streamSync(): Minipass<Result<Opts>, Result<Opts>>;
|
||||||
|
/**
|
||||||
|
* Default sync iteration function. Returns a Generator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterateSync(): Generator<Result<Opts>, void, void>;
|
||||||
|
[Symbol.iterator](): Generator<Result<Opts>, void, void>;
|
||||||
|
/**
|
||||||
|
* Default async iteration function. Returns an AsyncGenerator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterate(): AsyncGenerator<Result<Opts>, void, void>;
|
||||||
|
[Symbol.asyncIterator](): AsyncGenerator<Result<Opts>, void, void>;
|
||||||
|
}
|
||||||
|
//# sourceMappingURL=glob.d.ts.map
|
||||||
1
node_modules/glob/dist/cjs/glob.d.ts.map
generated
vendored
Normal file
1
node_modules/glob/dist/cjs/glob.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"glob.d.ts","sourceRoot":"","sources":["../../src/glob.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,SAAS,EAAoB,MAAM,WAAW,CAAA;AACvD,OAAO,QAAQ,MAAM,UAAU,CAAA;AAC/B,OAAO,EACL,QAAQ,EACR,IAAI,EACJ,UAAU,EAIX,MAAM,aAAa,CAAA;AAEpB,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAE,OAAO,EAAE,MAAM,cAAc,CAAA;AAGtC,MAAM,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAA;AACvC,MAAM,MAAM,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC,WAAW,CAAC,EAAE,SAAS,CAAC,CAAA;AAWlE;;;;;;;;;;;;GAYG;AACH,MAAM,WAAW,WAAW;IAC1B;;;;;;;;;;;;OAYG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;OAIG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAE5B;;;;;OAKG;IACH,GAAG,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;IAElB;;;;OAIG;IACH,GAAG,CAAC,EAAE,OAAO,CAAA;IAEb;;;;;;;;OAQG;IACH,WAAW,CAAC,EAAE,OAAO,CAAA;IAErB;;;;;;;;OAQG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;;;;;;;;;;;;;;OAgBG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IAEvC;;;;;OAKG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;;OAIG;IACH,SAAS,CAAC,EAAE,OAAO,CAAA;IAEnB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAA;IAEjB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAA;IAEjB;;;;;;;;;OASG;IACH,MAAM,CAAC,EAAE,OAAO,CAAA;IAEhB;;;OAGG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAA;IAEf;;;;;OAKG;IACH,UAAU,CAAC,EAAE,OAAO,CAAA;IAEpB;;;;OAIG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC,QAAQ,CAAA;IAE1B;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAA;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,IAAI,CAAC,EAAE,MAAM,CAAA;IAEb;;;;;OAKG;IACH,MAAM,CAAC,EAAE,UAAU,CAAA;IAEnB;;;;;;OAMG;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IAEd;;;OAGG;IACH,MAAM,CAAC,EAAE,WAAW,CAAA;IAEpB;;;;;;;;;;;;;OAaG;IACH,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAE9B;;;;;;;OAOG;IACH,aAAa,CAAC,EAAE,OAAO,CAAA;IAEvB;;;OAGG;IACH,EAAE,CAAC,EAAE,QAAQ,CAAA;CACd;AAED,MAAM,MAAM,4BAA4B,GAAG,WAAW,GAAG;IACvD,aAAa,EAAE,IAAI,CAAA;IACnB,QAAQ,CAAC,EAAE,SAAS,CAAA;CACrB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,KAAK,CAAA;CACtB,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG,WAAW,GAAG;IACxD,aAAa,CAAC,EAAE,SAAS,CAAA;CAC1B,CAAA;AAED,MAAM,MAAM,MAAM,CAAC,IAAI,IAAI,I
AAI,SAAS,4BAA4B,GAChE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,IAAI,SAAS,6BAA6B,GAC1C,MAAM,GACN,MAAM,GAAG,IAAI,CAAA;AACjB,MAAM,MAAM,OAAO,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAA;AAE1C,MAAM,MAAM,SAAS,CAAC,IAAI,IAAI,IAAI,SAAS,4BAA4B,GACnE,IAAI,GACJ,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,IAAI,SAAS,6BAA6B,GAC1C,KAAK,GACL,OAAO,CAAA;AAEX;;GAEG;AACH,qBAAa,IAAI,CAAC,IAAI,SAAS,WAAW,CAAE,YAAW,WAAW;IAChE,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,OAAO,CAAA;IACZ,WAAW,EAAE,OAAO,CAAA;IACpB,MAAM,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,GAAG,UAAU,CAAA;IACvC,aAAa,EAAE,OAAO,CAAA;IACtB,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,MAAM,CAAA;IAChB,OAAO,EAAE,OAAO,CAAA;IAChB,MAAM,EAAE,OAAO,CAAA;IACf,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,OAAO,CAAA;IACd,UAAU,EAAE,OAAO,CAAA;IACnB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAA;IACzB,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,UAAU,CAAA;IAClB,IAAI,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,oBAAoB,EAAE,OAAO,CAAA;IAC7B,aAAa,EAAE,SAAS,CAAC,IAAI,CAAC,CAAA;IAE9B;;OAEG;IACH,IAAI,EAAE,IAAI,CAAA;IAEV;;OAEG;IACH,QAAQ,EAAE,OAAO,EAAE,CAAA;IAEnB;;;;;;;;;;;OAWG;gBACS,OAAO,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI;IA6GlD;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAmBpC;;OAEG;IACH,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAezB;;OAEG;IACH,MAAM,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAa9C;;OAEG;IACH,UAAU,IAAI,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;IAalD;;;OAGG;IACH,WAAW,IAAI,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGlD,CAAC,MAAM,CAAC,QAAQ,CAAC;IAIjB;;;OAGG;IACH,OAAO,IAAI,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,IAAI,CAAC;IAGnD,CAAC,MAAM,CAAC,aAAa,CAAC;CAGvB"}
|
||||||
228
node_modules/glob/dist/cjs/glob.js
generated
vendored
Normal file
228
node_modules/glob/dist/cjs/glob.js
generated
vendored
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
"use strict";
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.Glob = void 0;
|
||||||
|
const minimatch_1 = require("minimatch");
|
||||||
|
const path_scurry_1 = require("path-scurry");
|
||||||
|
const url_1 = require("url");
|
||||||
|
const pattern_js_1 = require("./pattern.js");
|
||||||
|
const walker_js_1 = require("./walker.js");
|
||||||
|
// if no process global, just call it linux.
|
||||||
|
// so we default to case-sensitive, / separators
|
||||||
|
const defaultPlatform = typeof process === 'object' &&
|
||||||
|
process &&
|
||||||
|
typeof process.platform === 'string'
|
||||||
|
? process.platform
|
||||||
|
: 'linux';
|
||||||
|
/**
|
||||||
|
* An object that can perform glob pattern traversals.
|
||||||
|
*/
|
||||||
|
class Glob {
|
||||||
|
absolute;
|
||||||
|
cwd;
|
||||||
|
root;
|
||||||
|
dot;
|
||||||
|
dotRelative;
|
||||||
|
follow;
|
||||||
|
ignore;
|
||||||
|
magicalBraces;
|
||||||
|
mark;
|
||||||
|
matchBase;
|
||||||
|
maxDepth;
|
||||||
|
nobrace;
|
||||||
|
nocase;
|
||||||
|
nodir;
|
||||||
|
noext;
|
||||||
|
noglobstar;
|
||||||
|
pattern;
|
||||||
|
platform;
|
||||||
|
realpath;
|
||||||
|
scurry;
|
||||||
|
stat;
|
||||||
|
signal;
|
||||||
|
windowsPathsNoEscape;
|
||||||
|
withFileTypes;
|
||||||
|
/**
|
||||||
|
* The options provided to the constructor.
|
||||||
|
*/
|
||||||
|
opts;
|
||||||
|
/**
|
||||||
|
* An array of parsed immutable {@link Pattern} objects.
|
||||||
|
*/
|
||||||
|
patterns;
|
||||||
|
/**
|
||||||
|
* All options are stored as properties on the `Glob` object.
|
||||||
|
*
|
||||||
|
* See {@link GlobOptions} for full options descriptions.
|
||||||
|
*
|
||||||
|
* Note that a previous `Glob` object can be passed as the
|
||||||
|
* `GlobOptions` to another `Glob` instantiation to re-use settings
|
||||||
|
* and caches with a new pattern.
|
||||||
|
*
|
||||||
|
* Traversal functions can be called multiple times to run the walk
|
||||||
|
* again.
|
||||||
|
*/
|
||||||
|
constructor(pattern, opts) {
|
||||||
|
this.withFileTypes = !!opts.withFileTypes;
|
||||||
|
this.signal = opts.signal;
|
||||||
|
this.follow = !!opts.follow;
|
||||||
|
this.dot = !!opts.dot;
|
||||||
|
this.dotRelative = !!opts.dotRelative;
|
||||||
|
this.nodir = !!opts.nodir;
|
||||||
|
this.mark = !!opts.mark;
|
||||||
|
if (!opts.cwd) {
|
||||||
|
this.cwd = '';
|
||||||
|
}
|
||||||
|
else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
|
||||||
|
opts.cwd = (0, url_1.fileURLToPath)(opts.cwd);
|
||||||
|
}
|
||||||
|
this.cwd = opts.cwd || '';
|
||||||
|
this.root = opts.root;
|
||||||
|
this.magicalBraces = !!opts.magicalBraces;
|
||||||
|
this.nobrace = !!opts.nobrace;
|
||||||
|
this.noext = !!opts.noext;
|
||||||
|
this.realpath = !!opts.realpath;
|
||||||
|
this.absolute = opts.absolute;
|
||||||
|
this.noglobstar = !!opts.noglobstar;
|
||||||
|
this.matchBase = !!opts.matchBase;
|
||||||
|
this.maxDepth =
|
||||||
|
typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
|
||||||
|
this.stat = !!opts.stat;
|
||||||
|
this.ignore = opts.ignore;
|
||||||
|
if (this.withFileTypes && this.absolute !== undefined) {
|
||||||
|
throw new Error('cannot set absolute and withFileTypes:true');
|
||||||
|
}
|
||||||
|
if (typeof pattern === 'string') {
|
||||||
|
pattern = [pattern];
|
||||||
|
}
|
||||||
|
this.windowsPathsNoEscape =
|
||||||
|
!!opts.windowsPathsNoEscape ||
|
||||||
|
opts.allowWindowsEscape === false;
|
||||||
|
if (this.windowsPathsNoEscape) {
|
||||||
|
pattern = pattern.map(p => p.replace(/\\/g, '/'));
|
||||||
|
}
|
||||||
|
if (this.matchBase) {
|
||||||
|
if (opts.noglobstar) {
|
||||||
|
throw new TypeError('base matching requires globstar');
|
||||||
|
}
|
||||||
|
pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
|
||||||
|
}
|
||||||
|
this.pattern = pattern;
|
||||||
|
this.platform = opts.platform || defaultPlatform;
|
||||||
|
this.opts = { ...opts, platform: this.platform };
|
||||||
|
if (opts.scurry) {
|
||||||
|
this.scurry = opts.scurry;
|
||||||
|
if (opts.nocase !== undefined &&
|
||||||
|
opts.nocase !== opts.scurry.nocase) {
|
||||||
|
throw new Error('nocase option contradicts provided scurry option');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const Scurry = opts.platform === 'win32'
|
||||||
|
? path_scurry_1.PathScurryWin32
|
||||||
|
: opts.platform === 'darwin'
|
||||||
|
? path_scurry_1.PathScurryDarwin
|
||||||
|
: opts.platform
|
||||||
|
? path_scurry_1.PathScurryPosix
|
||||||
|
: path_scurry_1.PathScurry;
|
||||||
|
this.scurry = new Scurry(this.cwd, {
|
||||||
|
nocase: opts.nocase,
|
||||||
|
fs: opts.fs,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
this.nocase = this.scurry.nocase;
|
||||||
|
const mmo = {
|
||||||
|
// default nocase based on platform
|
||||||
|
...opts,
|
||||||
|
dot: this.dot,
|
||||||
|
matchBase: this.matchBase,
|
||||||
|
nobrace: this.nobrace,
|
||||||
|
nocase: this.nocase,
|
||||||
|
nocaseMagicOnly: true,
|
||||||
|
nocomment: true,
|
||||||
|
noext: this.noext,
|
||||||
|
nonegate: true,
|
||||||
|
optimizationLevel: 2,
|
||||||
|
platform: this.platform,
|
||||||
|
windowsPathsNoEscape: this.windowsPathsNoEscape,
|
||||||
|
};
|
||||||
|
const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
|
||||||
|
const [matchSet, globParts] = mms.reduce((set, m) => {
|
||||||
|
set[0].push(...m.set);
|
||||||
|
set[1].push(...m.globParts);
|
||||||
|
return set;
|
||||||
|
}, [[], []]);
|
||||||
|
this.patterns = matchSet.map((set, i) => {
|
||||||
|
return new pattern_js_1.Pattern(set, globParts[i], 0, this.platform);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
async walk() {
|
||||||
|
// Walkers always return array of Path objects, so we just have to
|
||||||
|
// coerce them into the right shape. It will have already called
|
||||||
|
// realpath() if the option was set to do so, so we know that's cached.
|
||||||
|
// start out knowing the cwd, at least
|
||||||
|
return [
|
||||||
|
...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).walk()),
|
||||||
|
];
|
||||||
|
}
|
||||||
|
walkSync() {
|
||||||
|
return [
|
||||||
|
...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).walkSync(),
|
||||||
|
];
|
||||||
|
}
|
||||||
|
stream() {
|
||||||
|
return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).stream();
|
||||||
|
}
|
||||||
|
streamSync() {
|
||||||
|
return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
|
||||||
|
...this.opts,
|
||||||
|
maxDepth: this.maxDepth !== Infinity
|
||||||
|
? this.maxDepth + this.scurry.cwd.depth()
|
||||||
|
: Infinity,
|
||||||
|
platform: this.platform,
|
||||||
|
nocase: this.nocase,
|
||||||
|
}).streamSync();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Default sync iteration function. Returns a Generator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterateSync() {
|
||||||
|
return this.streamSync()[Symbol.iterator]();
|
||||||
|
}
|
||||||
|
[Symbol.iterator]() {
|
||||||
|
return this.iterateSync();
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Default async iteration function. Returns an AsyncGenerator that
|
||||||
|
* iterates over the results.
|
||||||
|
*/
|
||||||
|
iterate() {
|
||||||
|
return this.stream()[Symbol.asyncIterator]();
|
||||||
|
}
|
||||||
|
[Symbol.asyncIterator]() {
|
||||||
|
return this.iterate();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
exports.Glob = Glob;
|
||||||
|
//# sourceMappingURL=glob.js.map
|
||||||
1
node_modules/glob/dist/cjs/glob.js.map
generated
vendored
Normal file
1
node_modules/glob/dist/cjs/glob.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
node_modules/glob/dist/cjs/has-magic.d.ts
generated
vendored
Normal file
14
node_modules/glob/dist/cjs/has-magic.d.ts
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { GlobOptions } from './glob.js';
/**
 * Return true if the patterns provided contain any magic glob characters,
 * given the options provided.
 *
 * Brace expansion is not considered "magic" unless the `magicalBraces` option
 * is set, as brace expansion just turns one string into an array of strings.
 * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
 * `'xby'` both do not contain any magic glob characters, and it's treated the
 * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
 * is in the options, brace expansion _is_ treated as a pattern having magic.
 *
 * @param pattern - a single glob pattern, or an array of patterns to check.
 * @param options - glob options affecting what counts as magic
 *   (e.g. `magicalBraces`).
 * @returns true if any pattern contains magic glob characters.
 */
export declare const hasMagic: (pattern: string | string[], options?: GlobOptions) => boolean;
//# sourceMappingURL=has-magic.d.ts.map
|
||||||
1
node_modules/glob/dist/cjs/has-magic.d.ts.map
generated
vendored
Normal file
1
node_modules/glob/dist/cjs/has-magic.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{"version":3,"file":"has-magic.d.ts","sourceRoot":"","sources":["../../src/has-magic.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,WAAW,EAAE,MAAM,WAAW,CAAA;AAEvC;;;;;;;;;;GAUG;AACH,eAAO,MAAM,QAAQ,YACV,MAAM,GAAG,MAAM,EAAE,YACjB,WAAW,KACnB,OAQF,CAAA"}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user