Mirror of https://github.com/github/codeql-action.git
Synced 2025-12-08 08:48:19 +08:00

Compare commits: codeql-bun ... v2.21.0
66 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1813ca74c3 | |
| | 6843540876 | |
| | 6ae46f7a92 | |
| | 0cae69e062 | |
| | d2ed0a05b6 | |
| | 651d09131a | |
| | e0f0892f83 | |
| | 27d3b2f857 | |
| | da4e0a06c0 | |
| | e266801e21 | |
| | 99c9f6a498 | |
| | 942acabcd0 | |
| | e0806ce653 | |
| | 84e042b8a4 | |
| | 5719ca3eb6 | |
| | d85a177b9b | |
| | 2e8581811e | |
| | 262017ad69 | |
| | eb1ef12e40 | |
| | 2ff6d83d07 | |
| | 5246291397 | |
| | 013a1d0cb2 | |
| | aedd8c2a63 | |
| | 9a97b34d8c | |
| | 489225d82a | |
| | 1b6383d6be | |
| | a148c58075 | |
| | 50527c5dba | |
| | 814b2edab6 | |
| | d2baed4b69 | |
| | c5526174a5 | |
| | c1f49580cf | |
| | 40a500c743 | |
| | 4fad06f438 | |
| | 07224254ab | |
| | 7d35788421 | |
| | 36928bf506 | |
| | d1a140b28e | |
| | 12aa0a6e01 | |
| | 4b7eb74ef5 | |
| | 863a05b28b | |
| | d6c8719550 | |
| | 859354c7e2 | |
| | 0fc0483240 | |
| | e828ed68c6 | |
| | 6639a31758 | |
| | 95a5fda31a | |
| | b3406fda8f | |
| | 916cfef293 | |
| | fed45865ba | |
| | 6a07b2ad43 | |
| | 8f80d7761c | |
| | 49f147856e | |
| | b8490d73e8 | |
| | ab9aa50acb | |
| | 28e070c442 | |
| | f232722edf | |
| | f60aebf286 | |
| | 9f45792756 | |
| | 98884904a2 | |
| | 5be8bd1c16 | |
| | eb5d323227 | |
| | 834c357222 | |
| | 7bd23b58b5 | |
| | f1c4784a3f | |
| | a590b4afa7 | |
2 .github/actions/check-sarif/action.yml vendored

@@ -16,5 +16,5 @@ inputs:
Comma separated list of query ids that should NOT be included in this SARIF file.

runs:
using: node12
using: node16
main: index.js
26 .github/dependabot.yml vendored

@@ -6,16 +6,34 @@ updates:
interval: weekly
labels:
- Update dependencies
# Ignore incompatible dependency updates
ignore:
- dependency-name: "*"
update-types:
- version-update:semver-minor
- version-update:semver-patch
# There is a type incompatibility issue between v0.0.9 and our other dependencies.
- dependency-name: "@octokit/plugin-retry"
versions: ["~6.0.0"]
# There is a type incompatibility issue between v0.0.9 and our other dependencies.
- dependency-name: "@schemastore/package"
versions: ["0.0.9"]
# v7 requires ESM
- dependency-name: "del"
versions: ["^7.0.0"]
groups:
npm:
patterns:
- "*"
- package-ecosystem: github-actions
directory: "/"
schedule:
interval: weekly
groups:
actions:
patterns:
- "*"
- package-ecosystem: github-actions
directory: "/.github/actions/setup-swift/" # All subdirectories outside of "/.github/workflows" must be explicitly included.
schedule:
interval: weekly
groups:
actions-setup-swift:
patterns:
- "*"
14 .github/workflows/__analyze-ref-input.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: "PR Check - Analyze: 'ref' and 'sha' from inputs"
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
2 .github/workflows/__autobuild-action.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - autobuild-action
3 .github/workflows/__config-export.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Config export
@@ -101,6 +101,5 @@ jobs:
}
core.info('Finished config export tests.');
env:
CODEQL_ACTION_EXPORT_CODE_SCANNING_CONFIG: true
CODEQL_PASS_CONFIG_TO_CLI: true
CODEQL_ACTION_TEST_MODE: true
2 .github/workflows/__diagnostics-export.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Diagnostic export
2 .github/workflows/__export-file-baseline-information.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Export file baseline information
2 .github/workflows/__extractor-ram-threads.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Extractor ram and threads options test
14 .github/workflows/__go-custom-queries.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: Custom queries'
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
10 .github/workflows/__go-tracing-autobuilder.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with autobuilder step'
@@ -25,10 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -41,6 +37,10 @@ jobs:
version: stable-20221211
- os: macos-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
10 .github/workflows/__go-tracing-custom-build-steps.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with custom build steps'
@@ -25,10 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -41,6 +37,10 @@ jobs:
version: stable-20221211
- os: macos-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
10 .github/workflows/__go-tracing-legacy-workflow.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Go: tracing with legacy workflow'
@@ -25,10 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -41,6 +37,10 @@ jobs:
version: stable-20221211
- os: macos-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
2 .github/workflows/__init-with-registries.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Download using registries'
2 .github/workflows/__javascript-source-root.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Custom source root
32 .github/workflows/__ml-powered-queries.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - ML-powered queries
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
@@ -110,8 +110,6 @@ jobs:

- name: Check sarif
uses: ./../action/.github/actions/check-sarif
# Running on Windows requires CodeQL CLI 2.9.0+.
if: "!(matrix.version == 'stable-20220401' && runner.os == 'Windows')"
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
@@ -119,14 +117,8 @@ jobs:
queries-not-run: foo,bar

- name: Check results
env:
# Running on Windows requires CodeQL CLI 2.9.0+.
SHOULD_RUN_ML_POWERED_QUERIES: ${{ !(matrix.version == 'stable-20220401' &&
runner.os == 'Windows') }}
shell: bash
run: |
echo "Expecting ML-powered queries to be run: ${SHOULD_RUN_ML_POWERED_QUERIES}"

cd "$RUNNER_TEMP/results"
# We should run at least the ML-powered queries in `expected_rules`.
expected_rules="js/ml-powered/nosql-injection js/ml-powered/path-injection js/ml-powered/sql-injection js/ml-powered/xss"
@@ -135,12 +127,9 @@ jobs:
found_rule=$(jq --arg rule "${rule}" '[.runs[0].tool.extensions[].rules | select(. != null) |
flatten | .[].id] | any(. == $rule)' javascript.sarif)
echo "Did find rule '${rule}': ${found_rule}"
if [[ "${found_rule}" != "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
if [[ "${found_rule}" != "true" ]]; then
echo "Expected SARIF output to contain rule '${rule}', but found no such rule."
exit 1
elif [[ "${found_rule}" == "true" && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Found rule '${rule}' in the SARIF output which shouldn't have been part of the analysis."
exit 1
fi
done

@@ -149,12 +138,9 @@ jobs:
select(.properties.score != null and (.rule.id | startswith("js/ml-powered/")))] | length' \
javascript.sarif)
echo "Found ${num_alerts} alerts from ML-powered queries.";
if [[ "${num_alerts}" -eq 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" == "true" ]]; then
if [[ "${num_alerts}" -eq 0 ]]; then
echo "Expected to find at least one alert from an ML-powered query but found ${num_alerts}."
exit 1
elif [[ "${num_alerts}" -ne 0 && "${SHOULD_RUN_ML_POWERED_QUERIES}" != "true" ]]; then
echo "Expected not to find any alerts from an ML-powered query but found ${num_alerts}."
exit 1
fi
env:
CODEQL_ACTION_TEST_MODE: true
10 .github/workflows/__multi-language-autodetect.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Multi-language repository
@@ -25,10 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -41,6 +37,10 @@ jobs:
version: stable-20221211
- os: macos-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
2 .github/workflows/__packaging-codescanning-config-inputs-js.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config and input passed to the CLI'
2 .github/workflows/__packaging-config-inputs-js.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config and input'
2 .github/workflows/__packaging-config-js.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Config file'
2 .github/workflows/__packaging-inputs-js.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: 'PR Check - Packaging: Action input'
14 .github/workflows/__remote-config.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Remote config file
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
2 .github/workflows/__resolve-environment-action.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Resolve environment
2 .github/workflows/__rubocop-multi-language.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - RuboCop multi-language
2 .github/workflows/__ruby.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Ruby analysis
98 .github/workflows/__scaling-reserved-ram.yml generated vendored Normal file

@@ -0,0 +1,98 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Scaling reserved RAM
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN: 'true'
on:
push:
branches:
- main
- releases/v2
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
workflow_dispatch: {}
jobs:
scaling-reserved-ram:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
version: stable-20220615
- os: ubuntu-latest
version: stable-20220908
- os: macos-latest
version: stable-20220908
- os: ubuntu-latest
version: stable-20221211
- os: macos-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
version: cached
- os: ubuntu-latest
version: latest
- os: macos-latest
version: latest
- os: ubuntu-latest
version: nightly-latest
- os: macos-latest
version: nightly-latest
name: Scaling reserved RAM
permissions:
contents: read
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Prepare test
id: prepare-test
uses: ./.github/actions/prepare-test
with:
version: ${{ matrix.version }}
- name: Set environment variable for Swift enablement
if: >-
runner.os != 'Windows' && (
matrix.version == '20220908' ||
matrix.version == '20221211'
)
shell: bash
run: echo "CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT=true" >> $GITHUB_ENV
- uses: ./../action/init
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
tools: ${{ steps.prepare-test.outputs.tools-url }}

- uses: ./../action/.github/actions/setup-swift
with:
codeql-path: ${{ steps.init.outputs.codeql-path }}

- name: Build code
shell: bash
run: ./build.sh

- uses: ./../action/analyze
id: analysis
with:
upload-database: false
env:
CODEQL_ACTION_SCALING_RESERVED_RAM: true
CODEQL_ACTION_TEST_MODE: true
2 .github/workflows/__split-workflow.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Split workflow
2 .github/workflows/__submit-sarif-failure.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Submit SARIF after failure
2 .github/workflows/__swift-custom-build.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Swift analysis using a custom build command
2 .github/workflows/__test-autobuild-working-dir.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Autobuild working directory
2 .github/workflows/__test-local-codeql.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Local CodeQL bundle
2 .github/workflows/__test-proxy.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Proxy test
6 .github/workflows/__unset-environment.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Test unsetting environment variables
@@ -25,14 +25,14 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: ubuntu-latest
version: stable-20220908
- os: ubuntu-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: ubuntu-latest
14 .github/workflows/__upload-ref-sha-input.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: "PR Check - Upload-sarif: 'ref' and 'sha' from inputs"
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
14 .github/workflows/__with-checkout-path.yml generated vendored

@@ -1,6 +1,6 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# (cd pr-checks; pip install ruamel.yaml && python3 sync.py)
# (cd pr-checks; pip install ruamel.yaml@0.17.31 && python3 sync.py)
# to regenerate this file.

name: PR Check - Use a custom `checkout_path`
@@ -25,12 +25,6 @@ jobs:
strategy:
matrix:
include:
- os: ubuntu-latest
version: stable-20220401
- os: macos-latest
version: stable-20220401
- os: windows-latest
version: stable-20220401
- os: ubuntu-latest
version: stable-20220615
- os: macos-latest
@@ -49,6 +43,12 @@ jobs:
version: stable-20221211
- os: windows-latest
version: stable-20221211
- os: ubuntu-latest
version: stable-20230418
- os: macos-latest
version: stable-20230418
- os: windows-latest
version: stable-20230418
- os: ubuntu-latest
version: cached
- os: macos-latest
4 .github/workflows/debug-artifacts.yml vendored

@@ -25,10 +25,10 @@ jobs:
- ubuntu-latest
- macos-latest
version:
- stable-20220401
- stable-20220615
- stable-20220908
- stable-20221211
- stable-20230418
- cached
- latest
- nightly-latest
@@ -74,7 +74,7 @@ jobs:
- name: Check expected artifacts exist
shell: bash
run: |
VERSIONS="stable-20220401 stable-20220615 stable-20220908 stable-20221211 cached latest nightly-latest"
VERSIONS="stable-20220615 stable-20220908 stable-20221211 stable-20230418 cached latest nightly-latest"
LANGUAGES="cpp csharp go java javascript python"
for version in $VERSIONS; do
for os in ubuntu-latest macos-latest; do
5 .github/workflows/pr-checks.yml vendored

@@ -47,12 +47,13 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.8
python-version: 3.11

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml
# When updating this, update the autogenerated code header in `sync.py` too.
pip install ruamel.yaml==0.17.31

# Ensure the generated PR check workflows are up to date.
- name: Verify PR checks up to date
10 CHANGELOG.md

@@ -1,10 +1,18 @@
# CodeQL Action Changelog

## [UNRELEASED]
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## 2.21.0 - 19 Jul 2023

- CodeQL Action now requires CodeQL CLI 2.9.4 or later. For more information, see the corresponding changelog entry for CodeQL Action version 2.20.4. [#1724](https://github.com/github/codeql-action/pull/1724)

## 2.20.4 - 14 Jul 2023

- This is the last release of the Action that supports CodeQL CLI versions 2.8.5 to 2.9.3. These versions of the CodeQL CLI were deprecated on June 20, 2023 alongside GitHub Enterprise Server 3.5 and will not be supported by the next release of the CodeQL Action (2.21.0).
- If you are using one of these versions, please update to CodeQL CLI version 2.9.4 or later. For instance, if you have specified a custom version of the CLI using the 'tools' input to the 'init' Action, you can remove this input to use the default version.
- Alternatively, if you want to continue using a version of the CodeQL CLI between 2.8.5 and 2.9.3, you can replace 'github/codeql-action/*@v2' by 'github/codeql-action/*@v2.20.4' in your code scanning workflow to ensure you continue using this version of the CodeQL Action.
- We are rolling out a feature in July 2023 that will slightly reduce the default amount of RAM used for query execution, in proportion to the runner's total memory. This will help to avoid out-of-memory failures on larger runners. [#1760](https://github.com/github/codeql-action/pull/1760)
- Update default CodeQL bundle version to 2.14.0. [#1762](https://github.com/github/codeql-action/pull/1762)

## 2.20.3 - 06 Jul 2023
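The CHANGELOG entry above about reducing the default RAM for query execution in proportion to the runner's total memory maps onto the `getMemoryFlag` change visible in the `lib/analyze-action.js` diff below. As a rough, non-authoritative sketch of the idea only (the function name, the 5% factor, and the 256 MiB floor are assumptions, not values taken from the Action):

```typescript
import * as os from "os";

// Illustrative only: reserve an amount of RAM that scales with the machine's
// total memory instead of a fixed amount, and pass the remainder to the CLI.
// The constants and the function name are assumptions, not codeql-action code.
function getScaledMemoryFlagMb(
  totalMemoryBytes: number = os.totalmem(),
  baseReservedMb = 256, // assumed fixed floor
  scalingFactor = 0.05 // assumed 5% of total RAM
): number {
  const totalMb = Math.floor(totalMemoryBytes / (1024 * 1024));
  const reservedMb = baseReservedMb + Math.floor(totalMb * scalingFactor);
  // Whatever is left over is what would be passed to the CLI via --ram.
  return Math.max(totalMb - reservedMb, 1);
}

// Example: on a 64 GiB runner this reserves roughly 256 MiB plus 5% of 65536 MiB.
console.log(`--ram=${getScaledMemoryFlagMb()}`);
```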
2 lib/analyze-action.js generated

@@ -158,10 +158,10 @@ async function run() {
const apiDetails = (0, api_client_1.getApiDetails)();
const outputDir = actionsUtil.getRequiredInput("output");
const threads = util.getThreadsFlag(actionsUtil.getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger);
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"]);
const repositoryNwo = (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY"));
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
const memory = await util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], features);
await runAutobuildIfLegacyGoWorkflow(config, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, config, logger, features);
if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
File diff suppressed because one or more lines are too long
4 lib/analyze.js generated

@@ -223,7 +223,9 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
completed_at: endTimeInterpretResults.toISOString(),
exit_status: "success",
language,
properties: perQueryAlertCounts,
properties: {
alertCounts: perQueryAlertCounts,
},
};
if (statusReport["event_reports"] === undefined) {
statusReport["event_reports"] = [];
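The hunk above changes the shape of the `event_reports` entry: the per-query alert counts move from being the `properties` object itself to a nested `alertCounts` key (the matching test assertions appear in the `lib/analyze.test.js` diff below). A rough TypeScript shape of the result, with assumed interface and field details rather than the Action's real type definitions:

```typescript
// Assumed, illustrative type; the Action's real definitions are not part of this diff.
interface InterpretResultsEventReport {
  event: "codeql database interpret-results";
  completed_at: string; // ISO timestamp, as in the hunk above
  exit_status: "success" | string;
  language: string;
  properties: {
    // Previously the per-query counts were the properties object itself;
    // after this change they sit under a dedicated `alertCounts` key.
    alertCounts: Record<string, number>;
  };
}
```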
File diff suppressed because one or more lines are too long
4 lib/analyze.test.js generated

@@ -154,6 +154,8 @@ const util = __importStar(require("./util"));
if (builtinStatusReport.event_reports) {
for (const eventReport of builtinStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
t.true("properties" in eventReport);
t.true("alertCounts" in eventReport.properties);
}
}
config.queries[language] = {
@@ -181,6 +183,8 @@ const util = __importStar(require("./util"));
if (customStatusReport.event_reports) {
for (const eventReport of customStatusReport.event_reports) {
t.deepEqual(eventReport.event, "codeql database interpret-results");
t.true("properties" in eventReport);
t.true("alertCounts" in eventReport.properties);
}
}
}
File diff suppressed because one or more lines are too long
2 lib/api-client.js generated

@@ -37,7 +37,7 @@ var DisallowedAPIVersionReason;
(function (DisallowedAPIVersionReason) {
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
})(DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = DisallowedAPIVersionReason = {}));
function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) {
const auth = (allowExternal && apiDetails.externalRepoAuth) || apiDetails.auth;
const retryingOctokit = githubUtils.GitHub.plugin(retry.retry);
@@ -1 +1 @@
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAoE;AACpE,6CAA+B;AAC/B,iCAA4D;AAE5D,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,GAA1B,kCAA0B,KAA1B,kCAA0B,QAGrC;AAiBD,SAAS,0BAA0B,CACjC,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,UAAU,CAAC,MAAM;QAC1B,SAAS,EAAE,iBAAiB,IAAA,+BAAgB,GAAE,EAAE;QAChD,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC;AAED,SAAgB,aAAa;IAC3B,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;QAC7C,MAAM,EAAE,IAAA,0BAAmB,EAAC,gBAAgB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAND,sCAMC;AAED,SAAgB,YAAY;IAC1B,OAAO,0BAA0B,CAAC,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAFD,oCAEC;AAED,SAAgB,4BAA4B,CAC1C,UAAoC;IAEpC,OAAO,0BAA0B,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;AACzE,CAAC;AAJD,oEAIC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;GAMG;AACI,KAAK,UAAU,gBAAgB;IACpC,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AALD,4CAKC"}
{"version":3,"file":"api-client.js","sourceRoot":"","sources":["../src/api-client.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uEAAyD;AACzD,6DAA+C;AAC/C,0EAAgD;AAEhD,iDAAoE;AACpE,6CAA+B;AAC/B,iCAA4D;AAE5D,IAAY,0BAGX;AAHD,WAAY,0BAA0B;IACpC,+FAAc,CAAA;IACd,+FAAc,CAAA;AAChB,CAAC,EAHW,0BAA0B,0CAA1B,0BAA0B,QAGrC;AAiBD,SAAS,0BAA0B,CACjC,UAAoC,EACpC,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,EAAE;IAE9B,MAAM,IAAI,GACR,CAAC,aAAa,IAAI,UAAU,CAAC,gBAAgB,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC;IACpE,MAAM,eAAe,GAAG,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAC/D,OAAO,IAAI,eAAe,CACxB,WAAW,CAAC,iBAAiB,CAAC,IAAI,EAAE;QAClC,OAAO,EAAE,UAAU,CAAC,MAAM;QAC1B,SAAS,EAAE,iBAAiB,IAAA,+BAAgB,GAAE,EAAE;QAChD,GAAG,EAAE,IAAA,2BAAe,EAAC,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;KACzC,CAAC,CACH,CAAC;AACJ,CAAC;AAED,SAAgB,aAAa;IAC3B,OAAO;QACL,IAAI,EAAE,IAAA,+BAAgB,EAAC,OAAO,CAAC;QAC/B,GAAG,EAAE,IAAA,0BAAmB,EAAC,mBAAmB,CAAC;QAC7C,MAAM,EAAE,IAAA,0BAAmB,EAAC,gBAAgB,CAAC;KAC9C,CAAC;AACJ,CAAC;AAND,sCAMC;AAED,SAAgB,YAAY;IAC1B,OAAO,0BAA0B,CAAC,aAAa,EAAE,CAAC,CAAC;AACrD,CAAC;AAFD,oCAEC;AAED,SAAgB,4BAA4B,CAC1C,UAAoC;IAEpC,OAAO,0BAA0B,CAAC,UAAU,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;AACzE,CAAC;AAJD,oEAIC;AAED,IAAI,mBAAmB,GAA8B,SAAS,CAAC;AAE/D;;;;;;GAMG;AACI,KAAK,UAAU,gBAAgB;IACpC,IAAI,mBAAmB,KAAK,SAAS,EAAE;QACrC,mBAAmB,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,EAAE,CAAC,CAAC;KACpE;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AALD,4CAKC"}
@@ -1 +1 @@
{ "maximumVersion": "3.10", "minimumVersion": "3.5" }
{ "maximumVersion": "3.10", "minimumVersion": "3.6" }
14 lib/codeql.js generated

@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = exports.CODEQL_VERSION_RESOLVE_ENVIRONMENT = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_EXPORT_CODE_SCANNING_CONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
exports.getExtraOptions = exports.getCodeQLForCmd = exports.getCodeQLForTesting = exports.getCachedCodeQL = exports.setCodeQL = exports.getCodeQL = exports.setupCodeQL = exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY = exports.CODEQL_VERSION_BUNDLE_SEMANTICALLY_VERSIONED = exports.CODEQL_VERSION_RESOLVE_ENVIRONMENT = exports.CODEQL_VERSION_INIT_WITH_QLCONFIG = exports.CODEQL_VERSION_EXPORT_CODE_SCANNING_CONFIG = exports.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE = exports.CODEQL_VERSION_BETTER_RESOLVE_LANGUAGES = exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = exports.CommandInvocationError = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));
@@ -62,7 +62,7 @@ let cachedCodeQL = undefined;
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
*/
const CODEQL_MINIMUM_VERSION = "2.8.5";
const CODEQL_MINIMUM_VERSION = "2.9.4";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
*/
@@ -76,12 +76,6 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
/**
* Versions 2.9.0+ of the CodeQL CLI run machine learning models from a temporary directory, which
* resolves an issue on Windows where TensorFlow models are not correctly loaded due to the path of
* some of their files being greater than MAX_PATH (260 characters).
*/
exports.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS = "2.9.0";
/**
* Previous versions had the option already, but were missing the
* --extractor-options-verbosity that we need.
@@ -473,10 +467,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
else if (await util.codeQlVersionAbove(this, "2.12.4")) {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (await features.getValue(feature_flags_1.Feature.NewAnalysisSummaryEnabled, codeql)) {
if (await features.getValue(feature_flags_1.Feature.NewAnalysisSummaryEnabled, this)) {
codeqlArgs.push("--new-analysis-summary");
}
else if (await util.codeQlVersionAbove(codeql, exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)) {
else if (await util.codeQlVersionAbove(this, exports.CODEQL_VERSION_NEW_ANALYSIS_SUMMARY)) {
codeqlArgs.push("--no-new-analysis-summary");
}
codeqlArgs.push(databasePath);
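The last hunk above fixes which object the feature and version checks are made against (`codeql` becomes `this`) when deciding between `--new-analysis-summary` and `--no-new-analysis-summary`. Stripped of the surrounding class, the gating pattern looks roughly like the sketch below; the helper names are assumptions, and the 2.14.0 threshold is only inferred from the test cases in the `lib/codeql.test.js` diff that follows.

```typescript
// Sketch of the gating pattern only; helper and feature names are assumptions.
async function newAnalysisSummaryArgs(
  isFeatureEnabled: (feature: string) => Promise<boolean>,
  isVersionAtLeast: (version: string) => Promise<boolean>,
  minVersion = "2.14.0" // assumed value of CODEQL_VERSION_NEW_ANALYSIS_SUMMARY
): Promise<string[]> {
  if (await isFeatureEnabled("new_analysis_summary_enabled")) {
    return ["--new-analysis-summary"];
  }
  if (await isVersionAtLeast(minVersion)) {
    // The CLI is new enough to know the flag, but the feature is off: opt out explicitly.
    return ["--no-new-analysis-summary"];
  }
  return []; // older CLIs get neither flag
}
```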
File diff suppressed because one or more lines are too long
38 lib/codeql.test.js generated

@@ -363,7 +363,7 @@ for (const isBundleVersionInUrl of [true, false]) {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
sinon.stub(codeqlObject, "getVersion").resolves("2.9.4");
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
const thisStubConfig = {
@@ -668,6 +668,42 @@ const injectedConfigMacro = ava_1.default.macro({
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
});
const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
{
featureEnabled: true,
codeqlVersion: "2.14.0",
flagPassed: true,
negativeFlagPassed: false,
},
{
featureEnabled: false,
codeqlVersion: "2.14.0",
flagPassed: false,
negativeFlagPassed: true,
},
{
featureEnabled: false,
codeqlVersion: "2.13.5",
flagPassed: false,
negativeFlagPassed: false,
},
];
for (const { featureEnabled, codeqlVersion, flagPassed, negativeFlagPassed, } of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
(0, ava_1.default)(`database interpret-results passes ${flagPassed
? "--new-analysis-summary"
: negativeFlagPassed
? "--no-new-analysis-summary"
: "nothing"} for CodeQL CLI v${codeqlVersion} when the new analysis summary feature is ${featureEnabled ? "enabled" : "disabled"}`, async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", stubConfig, (0, testing_utils_1.createFeatures)(featureEnabled ? [feature_flags_1.Feature.NewAnalysisSummaryEnabled] : []), (0, logging_1.getRunnerLogger)(true));
t.is(runnerConstructorStub.firstCall.args[1].includes("--new-analysis-summary"), flagPassed, `--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`);
t.is(runnerConstructorStub.firstCall.args[1].includes("--no-new-analysis-summary"), negativeFlagPassed, `--no-new-analysis-summary should${negativeFlagPassed ? "" : "n't"} be passed`);
});
}
function stubToolRunnerConstructor() {
const runnerObjectStub = sinon.createStubInstance(toolrunner.ToolRunner);
runnerObjectStub.exec.resolves(0);
File diff suppressed because one or more lines are too long
102 lib/config-utils.js generated

@@ -81,13 +81,13 @@ function validateQueries(resolvedQueries) {
const noDeclaredLanguage = resolvedQueries.noDeclaredLanguage;
const noDeclaredLanguageQueries = Object.keys(noDeclaredLanguage);
if (noDeclaredLanguageQueries.length !== 0) {
throw new Error(`${"The following queries do not declare a language. " +
throw new util_1.UserError(`${"The following queries do not declare a language. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${noDeclaredLanguageQueries.join("\n")}`);
}
const multipleDeclaredLanguages = resolvedQueries.multipleDeclaredLanguages;
const multipleDeclaredLanguagesQueries = Object.keys(multipleDeclaredLanguages);
if (multipleDeclaredLanguagesQueries.length !== 0) {
throw new Error(`${"The following queries declare multiple languages. " +
throw new util_1.UserError(`${"The following queries declare multiple languages. " +
"Their qlpack.yml files are either missing or is invalid.\n"}${multipleDeclaredLanguagesQueries.join("\n")}`);
}
}
@@ -145,22 +145,18 @@ async function addBuiltinSuiteQueries(languages, codeQL, resultMap, packs, suite
let injectedMlQueries = false;
const found = builtinSuites.find((suite) => suite === suiteName);
if (!found) {
throw new Error(getQueryUsesInvalid(configFile, suiteName));
throw new util_1.UserError(getQueryUsesInvalid(configFile, suiteName));
}
if (suiteName === "security-experimental" &&
!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE))) {
throw new Error(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
throw new util_1.UserError(`The 'security-experimental' suite is not supported on CodeQL CLI versions earlier than
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE}. Please upgrade to CodeQL CLI version
${codeql_1.CODEQL_VERSION_SECURITY_EXPERIMENTAL_SUITE} or later.`);
}
// If we're running the JavaScript security-extended analysis (or a superset of it), the repo is
// opted into the ML-powered queries beta, and a user hasn't already added the ML-powered query
// pack, then add the ML-powered query pack so that we run ML-powered queries.
if (
// Only run ML-powered queries on Windows if we have a CLI that supports it.
(process.platform !== "win32" ||
(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_ML_POWERED_QUERIES_WINDOWS))) &&
languages.includes("javascript") &&
if (languages.includes("javascript") &&
(found === "security-experimental" ||
found === "security-extended" ||
found === "security-and-quality") &&
@@ -188,13 +184,13 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, workspacePath,
let absoluteQueryPath = path.join(workspacePath, localQueryPath);
// Check the file exists
if (!fs.existsSync(absoluteQueryPath)) {
throw new Error(getLocalPathDoesNotExist(configFile, localQueryPath));
throw new util_1.UserError(getLocalPathDoesNotExist(configFile, localQueryPath));
}
// Call this after checking file exists, because it'll fail if file doesn't exist
absoluteQueryPath = fs.realpathSync(absoluteQueryPath);
// Check the local path doesn't jump outside the repo using '..' or symlinks
if (!(absoluteQueryPath + path.sep).startsWith(fs.realpathSync(workspacePath) + path.sep)) {
throw new Error(getLocalPathOutsideOfRepository(configFile, localQueryPath));
throw new util_1.UserError(getLocalPathOutsideOfRepository(configFile, localQueryPath));
}
const extraSearchPath = workspacePath;
await runResolveQueries(codeQL, resultMap, [absoluteQueryPath], extraSearchPath);
@@ -205,7 +201,7 @@ async function addLocalQueries(codeQL, resultMap, localQueryPath, workspacePath,
async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetails, logger, configFile) {
let tok = queryUses.split("@");
if (tok.length !== 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
throw new util_1.UserError(getQueryUsesInvalid(configFile, queryUses));
}
const ref = tok[1];
tok = tok[0].split("/");
@@ -213,11 +209,11 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
// The second token is the repo
// The rest is a path, if there is more than one token combine them to form the full path
if (tok.length < 2) {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
throw new util_1.UserError(getQueryUsesInvalid(configFile, queryUses));
}
// Check none of the parts of the repository name are empty
if (tok[0].trim() === "" || tok[1].trim() === "") {
throw new Error(getQueryUsesInvalid(configFile, queryUses));
throw new util_1.UserError(getQueryUsesInvalid(configFile, queryUses));
}
const nwo = `${tok[0]}/${tok[1]}`;
// Checkout the external repository
@@ -243,7 +239,7 @@ async function addRemoteQueries(codeQL, resultMap, queryUses, tempDir, apiDetail
async function parseQueryUses(languages, codeQL, resultMap, packs, queryUses, tempDir, workspacePath, apiDetails, features, logger, configFile) {
queryUses = queryUses.trim();
if (queryUses === "") {
throw new Error(getQueryUsesInvalid(configFile));
throw new util_1.UserError(getQueryUsesInvalid(configFile));
}
// Check for the local path case before we start trying to parse the repository name
if (queryUses.startsWith("./")) {
@@ -284,12 +280,12 @@ function validateAndSanitisePath(originalPath, propertyName, configFile, logger)
}
// An empty path is not allowed as it's meaningless
if (newPath === "") {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
throw new util_1.UserError(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" is not an invalid path. ` +
`It is not necessary to include it, and it is not allowed to exclude it.`));
}
// Check for illegal uses of **
if (newPath.match(pathStarsRegex)) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
throw new util_1.UserError(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an invalid "**" wildcard. ` +
`They must be immediately preceded and followed by a slash as in "/**/", or come at the start or end.`));
}
// Check for other regex characters that we don't support.
@@ -302,7 +298,7 @@ function validateAndSanitisePath(originalPath, propertyName, configFile, logger)
// This may not play nicely with project layouts.
// This restriction can be lifted later if we determine they are ok.
if (newPath.indexOf("\\") !== -1) {
throw new Error(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
throw new util_1.UserError(getConfigFilePropertyError(configFile, propertyName, `"${originalPath}" contains an "\\" character. These are not allowed in filters. ` +
`If running on windows we recommend using "/" instead for path filters.`));
}
return newPath;
@@ -408,7 +404,7 @@ exports.getUnknownLanguagesError = getUnknownLanguagesError;
*/
async function getLanguagesInRepo(repository, logger) {
logger.debug(`GitHub repo ${repository.owner} ${repository.repo}`);
const response = await api.getApiClient().repos.listLanguages({
const response = await api.getApiClient().rest.repos.listLanguages({
owner: repository.owner,
repo: repository.repo,
});
@@ -452,7 +448,7 @@ async function getLanguages(codeQL, languagesInput, repository, logger) {
// If the languages parameter was not given and no languages were
// detected then fail here as this is a workflow configuration error.
if (languages.length === 0) {
throw new Error(getNoLanguagesError());
throw new util_1.UserError(getNoLanguagesError());
}
// Make sure they are supported
const parsedLanguages = [];
@@ -470,7 +466,7 @@ async function getLanguages(codeQL, languagesInput, repository, logger) {
// Any unknown languages here would have come directly from the input
// since we filter unknown languages coming from the GitHub API.
if (unknownLanguages.length > 0) {
throw new Error(getUnknownLanguagesError(unknownLanguages));
throw new util_1.UserError(getUnknownLanguagesError(unknownLanguages));
}
return parsedLanguages;
}
@@ -595,10 +591,10 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
// even though we don't use the value yet.
if (NAME_PROPERTY in parsedYAML) {
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
throw new Error(getNameInvalid(configFile));
throw new util_1.UserError(getNameInvalid(configFile));
}
if (parsedYAML[NAME_PROPERTY].length === 0) {
throw new Error(getNameInvalid(configFile));
throw new util_1.UserError(getNameInvalid(configFile));
}
}
const languages = await getLanguages(codeQL, languagesInput, repository, logger);
@@ -614,7 +610,7 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
let disableDefaultQueries = false;
if (DISABLE_DEFAULT_QUERIES_PROPERTY in parsedYAML) {
if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
throw new Error(getDisableDefaultQueriesInvalid(configFile));
throw new util_1.UserError(getDisableDefaultQueriesInvalid(configFile));
|
||||
}
|
||||
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
|
||||
}
|
||||
@@ -635,33 +631,33 @@ async function loadConfig(languagesInput, rawQueriesInput, rawPacksInput, config
|
||||
QUERIES_PROPERTY in parsedYAML) {
|
||||
const queriesArr = parsedYAML[QUERIES_PROPERTY];
|
||||
if (!Array.isArray(queriesArr)) {
|
||||
throw new Error(getQueriesInvalid(configFile));
|
||||
throw new util_1.UserError(getQueriesInvalid(configFile));
|
||||
}
|
||||
for (const query of queriesArr) {
|
||||
if (typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueriesMissingUses(configFile));
|
||||
throw new util_1.UserError(getQueriesMissingUses(configFile));
|
||||
}
|
||||
await parseQueryUses(languages, codeQL, queries, packs, query[QUERIES_USES_PROPERTY], tempDir, workspacePath, apiDetails, features, logger, configFile);
|
||||
}
|
||||
}
|
||||
if (PATHS_IGNORE_PROPERTY in parsedYAML) {
|
||||
if (!Array.isArray(parsedYAML[PATHS_IGNORE_PROPERTY])) {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
throw new util_1.UserError(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
for (const ignorePath of parsedYAML[PATHS_IGNORE_PROPERTY]) {
|
||||
if (typeof ignorePath !== "string" || ignorePath === "") {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
throw new util_1.UserError(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
pathsIgnore.push(validateAndSanitisePath(ignorePath, PATHS_IGNORE_PROPERTY, configFile, logger));
|
||||
}
|
||||
}
|
||||
if (PATHS_PROPERTY in parsedYAML) {
|
||||
if (!Array.isArray(parsedYAML[PATHS_PROPERTY])) {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
throw new util_1.UserError(getPathsInvalid(configFile));
|
||||
}
|
||||
for (const includePath of parsedYAML[PATHS_PROPERTY]) {
|
||||
if (typeof includePath !== "string" || includePath === "") {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
throw new util_1.UserError(getPathsInvalid(configFile));
|
||||
}
|
||||
paths.push(validateAndSanitisePath(includePath, PATHS_PROPERTY, configFile, logger));
|
||||
}
|
||||
@@ -727,7 +723,7 @@ function parseQueriesFromInput(rawQueriesInput, queriesInputCombines) {
|
||||
? rawQueriesInput.trim().slice(1).trim()
|
||||
: rawQueriesInput?.trim() ?? "";
|
||||
if (queriesInputCombines && trimmedInput.length === 0) {
|
||||
throw new Error(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
throw new util_1.UserError(getConfigFilePropertyError(undefined, "queries", "A '+' was used in the 'queries' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
}
|
||||
return trimmedInput.split(",").map((query) => ({ uses: query.trim() }));
|
||||
}
|
||||
@@ -754,12 +750,12 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
|
||||
else {
|
||||
// this is an error since multi-language analysis requires
|
||||
// packs split by language
|
||||
throw new Error(getPacksInvalidSplit(configFile));
|
||||
throw new util_1.UserError(getPacksInvalidSplit(configFile));
|
||||
}
|
||||
}
|
||||
for (const [lang, packsArr] of Object.entries(packsByLanguage)) {
|
||||
if (!Array.isArray(packsArr)) {
|
||||
throw new Error(getPacksInvalid(configFile));
|
||||
throw new util_1.UserError(getPacksInvalid(configFile));
|
||||
}
|
||||
if (!languages.includes(lang)) {
|
||||
// This particular language is not being analyzed in this run.
|
||||
@@ -769,7 +765,7 @@ function parsePacksFromConfig(packsByLanguage, languages, configFile, logger) {
|
||||
}
|
||||
else {
|
||||
// This language is invalid, probably a misspelling
|
||||
throw new Error(getPacksRequireLanguage(configFile, lang));
|
||||
throw new util_1.UserError(getPacksRequireLanguage(configFile, lang));
|
||||
}
|
||||
}
|
||||
packs[lang] = packsArr.map((packStr) => validatePackSpecification(packStr, configFile));
|
||||
@@ -782,16 +778,16 @@ function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
|
||||
return undefined;
|
||||
}
|
||||
if (languages.length > 1) {
|
||||
throw new Error("Cannot specify a 'packs' input in a multi-language analysis. Use a codeql-config.yml file instead and specify packs by language.");
|
||||
throw new util_1.UserError("Cannot specify a 'packs' input in a multi-language analysis. Use a codeql-config.yml file instead and specify packs by language.");
|
||||
}
|
||||
else if (languages.length === 0) {
|
||||
throw new Error("No languages specified. Cannot process the packs input.");
|
||||
throw new util_1.UserError("No languages specified. Cannot process the packs input.");
|
||||
}
|
||||
rawPacksInput = rawPacksInput.trim();
|
||||
if (packsInputCombines) {
|
||||
rawPacksInput = rawPacksInput.trim().substring(1).trim();
|
||||
if (!rawPacksInput) {
|
||||
throw new Error(getConfigFilePropertyError(undefined, "packs", "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
throw new util_1.UserError(getConfigFilePropertyError(undefined, "packs", "A '+' was used in the 'packs' input to specify that you wished to add some packs to your CodeQL analysis. However, no packs were specified. Please either remove the '+' or specify some packs."));
|
||||
}
|
||||
}
|
||||
return {
|
||||
@@ -821,7 +817,7 @@ function parsePacksFromInput(rawPacksInput, languages, packsInputCombines) {
|
||||
*/
|
||||
function parsePacksSpecification(packStr, configFile) {
|
||||
if (typeof packStr !== "string") {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
throw new util_1.UserError(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
packStr = packStr.trim();
|
||||
const atIndex = packStr.indexOf("@");
|
||||
@@ -842,7 +838,7 @@ function parsePacksSpecification(packStr, configFile) {
|
||||
? packStr.slice(pathStart, pathEnd).trim()
|
||||
: undefined;
|
||||
if (!PACK_IDENTIFIER_PATTERN.test(packName)) {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
throw new util_1.UserError(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
if (version) {
|
||||
try {
|
||||
@@ -850,7 +846,7 @@ function parsePacksSpecification(packStr, configFile) {
|
||||
}
|
||||
catch (e) {
|
||||
// The range string is invalid. OK to ignore the caught error
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
throw new util_1.UserError(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
}
|
||||
if (packPath &&
|
||||
@@ -861,11 +857,11 @@ function parsePacksSpecification(packStr, configFile) {
|
||||
// which seems more awkward.
|
||||
path.normalize(packPath).split(path.sep).join("/") !==
|
||||
packPath.split(path.sep).join("/"))) {
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
throw new util_1.UserError(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
if (!packPath && pathStart) {
|
||||
// 0 length path
|
||||
throw new Error(getPacksStrInvalid(packStr, configFile));
|
||||
throw new util_1.UserError(getPacksStrInvalid(packStr, configFile));
|
||||
}
|
||||
return {
|
||||
name: packName,
|
||||
@@ -891,7 +887,7 @@ function parsePacks(rawPacksFromConfig, rawPacksFromInput, packsInputCombines, l
|
||||
}
|
||||
if (!packsInputCombines) {
|
||||
if (!packsFromInput) {
|
||||
throw new Error(getPacksInvalid(configFile));
|
||||
throw new util_1.UserError(getPacksInvalid(configFile));
|
||||
}
|
||||
return packsFromInput;
|
||||
}
|
||||
@@ -964,7 +960,7 @@ async function initConfig(languagesInput, queriesInput, packsInput, registriesIn
|
||||
const hasCustomQueries = config.queries[language]?.custom.length > 0;
|
||||
const hasPacks = (config.packs[language]?.length || 0) > 0;
|
||||
if (!hasPacks && !hasBuiltinQueries && !hasCustomQueries) {
|
||||
throw new Error(`Did not detect any queries to run for ${language}. ` +
|
||||
throw new util_1.UserError(`Did not detect any queries to run for ${language}. ` +
|
||||
"Please make sure that the default queries are enabled, or you are specifying queries to run.");
|
||||
}
|
||||
}
|
||||
@@ -982,7 +978,7 @@ function parseRegistries(registriesInput) {
|
||||
: undefined;
|
||||
}
|
||||
catch (e) {
|
||||
throw new Error("Invalid registries input. Must be a YAML string.");
|
||||
throw new util_1.UserError("Invalid registries input. Must be a YAML string.");
|
||||
}
|
||||
}
|
||||
function isLocal(configPath) {
|
||||
@@ -995,11 +991,11 @@ function isLocal(configPath) {
|
||||
function getLocalConfig(configFile, workspacePath) {
|
||||
// Error if the config file is now outside of the workspace
|
||||
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
|
||||
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
|
||||
throw new util_1.UserError(getConfigFileOutsideWorkspaceErrorMessage(configFile));
|
||||
}
|
||||
// Error if the file does not exist
|
||||
if (!fs.existsSync(configFile)) {
|
||||
throw new Error(getConfigFileDoesNotExistErrorMessage(configFile));
|
||||
throw new util_1.UserError(getConfigFileDoesNotExistErrorMessage(configFile));
|
||||
}
|
||||
return yaml.load(fs.readFileSync(configFile, "utf8"));
|
||||
}
|
||||
@@ -1009,11 +1005,11 @@ async function getRemoteConfig(configFile, apiDetails) {
|
||||
const pieces = format.exec(configFile);
|
||||
// 5 = 4 groups + the whole expression
|
||||
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
|
||||
throw new Error(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||
throw new util_1.UserError(getConfigFileRepoFormatInvalidMessage(configFile));
|
||||
}
|
||||
const response = await api
|
||||
.getApiClientWithExternalAuth(apiDetails)
|
||||
.repos.getContent({
|
||||
.rest.repos.getContent({
|
||||
owner: pieces.groups.owner,
|
||||
repo: pieces.groups.repo,
|
||||
path: pieces.groups.path,
|
||||
@@ -1024,10 +1020,10 @@ async function getRemoteConfig(configFile, apiDetails) {
|
||||
fileContents = response.data.content;
|
||||
}
|
||||
else if (Array.isArray(response.data)) {
|
||||
throw new Error(getConfigFileDirectoryGivenMessage(configFile));
|
||||
throw new util_1.UserError(getConfigFileDirectoryGivenMessage(configFile));
|
||||
}
|
||||
else {
|
||||
throw new Error(getConfigFileFormatInvalidMessage(configFile));
|
||||
throw new util_1.UserError(getConfigFileFormatInvalidMessage(configFile));
|
||||
}
|
||||
return yaml.load(Buffer.from(fileContents, "base64").toString("binary"));
|
||||
}
|
||||
@@ -1112,7 +1108,7 @@ async function generateRegistries(registriesInput, codeQL, tempDir, logger) {
|
||||
let qlconfigFile;
|
||||
if (registries) {
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeQL, codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD))) {
|
||||
throw new Error(`The 'registries' input is not supported on CodeQL CLI versions earlier than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}. Please upgrade to CodeQL CLI version ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD} or later.`);
|
||||
throw new util_1.UserError(`The 'registries' input is not supported on CodeQL CLI versions earlier than ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD}. Please upgrade to CodeQL CLI version ${codeql_1.CODEQL_VERSION_GHES_PACK_DOWNLOAD} or later.`);
|
||||
}
|
||||
// generate a qlconfig.yml file to hold the registry configs.
|
||||
const qlconfig = createRegistriesBlock(registries);
|
||||
@@ -1139,7 +1135,7 @@ exports.generateRegistries = generateRegistries;
|
||||
function createRegistriesBlock(registries) {
|
||||
if (!Array.isArray(registries) ||
|
||||
registries.some((r) => !r.url || !r.packages)) {
|
||||
throw new Error("Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
||||
throw new util_1.UserError("Invalid 'registries' input. Must be an array of objects with 'url' and 'packages' properties.");
|
||||
}
|
||||
// be sure to remove the `token` field from the registry before writing it to disk.
|
||||
const safeRegistries = registries.map((registry) => ({
|
||||
|
||||
File diff suppressed because one or more lines are too long
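The recurring change in lib/config-utils.js above replaces `throw new Error(...)` with `throw new util_1.UserError(...)`, so configuration mistakes can be told apart from internal failures. The following is a minimal TypeScript sketch of that pattern with illustrative names; it is not the repository's exact util.ts or config parsing code.

// Illustrative sketch only: a UserError marker class, and config parsing code
// that throws it so callers can report configuration problems differently
// from internal bugs. Names here are assumptions, not the repository's code.
class UserError extends Error {
  constructor(message: string) {
    super(message);
    this.name = "UserError";
  }
}

function parseLanguagesInput(raw: string): string[] {
  const languages = raw.split(",").map((l) => l.trim()).filter((l) => l !== "");
  if (languages.length === 0) {
    // A bad input is the user's mistake, so signal it as a UserError...
    throw new UserError("Did not detect any languages to analyze.");
  }
  return languages;
}

try {
  parseLanguagesInput("");
} catch (e) {
  // ...which lets the caller pick a "user-error" status instead of "aborted".
  console.log(e instanceof UserError ? "user-error" : "aborted");
}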
128 lib/config-utils.test.js (generated)
@@ -40,7 +40,7 @@ const languages_1 = require("./languages");
const logging_1 = require("./logging");
const repository_1 = require("./repository");
const testing_utils_1 = require("./testing-utils");
const util = __importStar(require("./util"));
const util_1 = require("./util");
(0, testing_utils_1.setupTests)(ava_1.default);
const sampleApiDetails = {
auth: "token",
@@ -49,7 +49,7 @@ const sampleApiDetails = {
apiURL: undefined,
registriesAuthTokens: undefined,
};
const gitHubVersion = { type: util.GitHubVariant.DOTCOM };
const gitHubVersion = { type: util_1.GitHubVariant.DOTCOM };
// Returns the filepath of the newly-created file
function createConfigFile(inputFileContents, tmpDir) {
const configFilePath = path.join(tmpDir, "input");
@@ -63,7 +63,7 @@ function mockGetContents(content) {
data: content,
};
const spyGetContents = sinon
.stub(client.repos, "getContent")
.stub(client.rest.repos, "getContent")
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
.resolves(response);
sinon.stub(api, "getApiClient").value(() => client);
@@ -80,11 +80,11 @@ function mockListLanguages(languages) {
response.data[language] = 123;
}
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
sinon.stub(client.repos, "listLanguages").resolves(response);
sinon.stub(client.rest.repos, "listLanguages").resolves(response);
sinon.stub(api, "getApiClient").value(() => client);
}
(0, ava_1.default)("load empty config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const logger = (0, logging_1.getRunnerLogger)(true);
const languages = "javascript,python";
const codeQL = (0, codeql_1.setCodeQL)({
@@ -107,7 +107,7 @@ function mockListLanguages(languages) {
});
});
(0, ava_1.default)("loading config saves config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const logger = (0, logging_1.getRunnerLogger)(true);
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
@@ -142,18 +142,18 @@ function mockListLanguages(languages) {
});
});
(0, ava_1.default)("load input outside of workspace", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
try {
await configUtils.initConfig(undefined, undefined, undefined, undefined, "../input", undefined, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
t.deepEqual(err, new util_1.UserError(configUtils.getConfigFileOutsideWorkspaceErrorMessage(path.join(tmpDir, "../input"))));
}
});
});
(0, ava_1.default)("load non-local input with invalid repo syntax", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
try {
@@ -161,12 +161,12 @@ function mockListLanguages(languages) {
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
t.deepEqual(err, new util_1.UserError(configUtils.getConfigFileRepoFormatInvalidMessage("octo-org/codeql-config@main")));
}
});
});
(0, ava_1.default)("load non-existent input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const languages = "javascript";
const configFile = "input";
t.false(fs.existsSync(path.join(tmpDir, configFile)));
@@ -175,12 +175,12 @@ function mockListLanguages(languages) {
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
t.deepEqual(err, new util_1.UserError(configUtils.getConfigFileDoesNotExistErrorMessage(path.join(tmpDir, "input"))));
}
});
});
(0, ava_1.default)("load non-empty input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
@@ -253,7 +253,7 @@ function mockListLanguages(languages) {
});
});
(0, ava_1.default)("Default queries are used", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
// Check that the default behaviour is to add the default queries.
// In this case if a config file is specified but does not include
// the disable-default-queries field.
@@ -314,7 +314,7 @@ function queriesToResolvedQueryForm(queries) {
};
}
(0, ava_1.default)("Queries can be specified in config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
@@ -347,7 +347,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Queries from config file can be overridden in workflow file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const inputFileContents = `
name: my config
queries:
@@ -383,7 +383,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Queries in workflow file can be used in tandem with the 'disable default queries' option", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
@@ -417,7 +417,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Multiple queries can be specified in workflow file, no config file required", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
fs.mkdirSync(path.join(tmpDir, "override1"));
fs.mkdirSync(path.join(tmpDir, "override2"));
const testQueries = "./override1,./override2";
@@ -450,7 +450,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Queries in workflow file can be added to the set of queries without overriding config file", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
@@ -496,7 +496,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Queries can be specified using config input", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const configInput = `
name: my config
queries:
@@ -538,7 +538,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Using config input and file together, config input should be used.", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
process.env["GITHUB_WORKSPACE"] = tmpDir;
const inputFileContents = `
@@ -587,7 +587,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Invalid queries in workflow file handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const queries = "foo/bar@v1@v3";
const languages = "javascript";
// This function just needs to be type-correct; it doesn't need to do anything,
@@ -611,12 +611,12 @@ function queriesToResolvedQueryForm(queries) {
t.fail("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
t.deepEqual(err, new util_1.UserError(configUtils.getQueryUsesInvalid(undefined, "foo/bar@v1@v3")));
}
});
});
(0, ava_1.default)("API client used when reading remote config", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
@@ -658,7 +658,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Remote config handles the case where a directory is provided", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
@@ -667,12 +667,12 @@ function queriesToResolvedQueryForm(queries) {
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
t.deepEqual(err, new util_1.UserError(configUtils.getConfigFileDirectoryGivenMessage(repoReference)));
}
});
});
(0, ava_1.default)("Invalid format of remote config handled correctly", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const dummyResponse = {
// note no "content" property here
};
@@ -683,12 +683,12 @@ function queriesToResolvedQueryForm(queries) {
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
t.deepEqual(err, new util_1.UserError(configUtils.getConfigFileFormatInvalidMessage(repoReference)));
}
});
});
(0, ava_1.default)("No detected languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
mockListLanguages([]);
const codeQL = (0, codeql_1.setCodeQL)({
async resolveLanguages() {
@@ -703,24 +703,24 @@ function queriesToResolvedQueryForm(queries) {
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getNoLanguagesError()));
t.deepEqual(err, new util_1.UserError(configUtils.getNoLanguagesError()));
}
});
});
(0, ava_1.default)("Unknown languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const languages = "rubbish,english";
try {
await configUtils.initConfig(languages, undefined, undefined, undefined, undefined, undefined, undefined, false, false, "", "", { owner: "github", repo: "example" }, tmpDir, (0, codeql_1.getCachedCodeQL)(), tmpDir, gitHubVersion, sampleApiDetails, (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(configUtils.getUnknownLanguagesError(["rubbish", "english"])));
t.deepEqual(err, new util_1.UserError(configUtils.getUnknownLanguagesError(["rubbish", "english"])));
}
});
});
(0, ava_1.default)("Config specifies packages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
@@ -749,7 +749,7 @@ function queriesToResolvedQueryForm(queries) {
});
});
(0, ava_1.default)("Config specifies packages for multiple languages", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
@@ -807,7 +807,7 @@ function queriesToResolvedQueryForm(queries) {
});
function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGenerator) {
(0, ava_1.default)(`load invalid input - ${testName}`, async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async resolveQueries() {
return {
@@ -829,7 +829,7 @@ function doInvalidInputTest(testName, inputFileContents, expectedErrorMessageGen
throw new Error("initConfig did not throw error");
}
catch (err) {
t.deepEqual(err, new Error(expectedErrorMessageGenerator(inputFile)));
t.deepEqual(err, new util_1.UserError(expectedErrorMessageGenerator(inputFile)));
}
});
});
@@ -1064,7 +1064,7 @@ parseInputAndConfigErrorMacro.title = (providedTitle) => `Parse Packs input and
(0, ava_1.default)("input with invalid pack name", parseInputAndConfigErrorMacro, {}, " xxx", [languages_1.Language.cpp], false, /"xxx" is not a valid pack/);
const mlPoweredQueriesMacro = ava_1.default.macro({
exec: async (t, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const codeQL = (0, codeql_1.setCodeQL)({
async getVersion() {
return codeQLVersion;
@@ -1101,37 +1101,23 @@ const mlPoweredQueriesMacro = ava_1.default.macro({
});
// macro, codeQLVersion, isMlPoweredQueriesEnabled, packsInput, queriesInput, expectedVersionString
// Test that ML-powered queries aren't run when the feature is off.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", false, undefined, "security-extended", undefined);
// Test that the ~0.1.0 version of ML-powered queries is run on v2.8.3 of the CLI.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.3", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.1.0");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.12.3", false, undefined, "security-extended", undefined);
// Test that ML-powered queries aren't run when the user hasn't specified that we should run the
// `security-extended`, `security-and-quality`, or `security-experimental` query suite.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.7.5", true, undefined, undefined, undefined);
// Test that ML-powered queries are run on non-Windows platforms running `security-extended` on
// versions of the CodeQL CLI prior to 2.9.0.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-extended", process.platform === "win32" ? undefined : "~0.2.0");
// Test that ML-powered queries are run on non-Windows platforms running `security-and-quality` on
// versions of the CodeQL CLI prior to 2.9.0.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.8.5", true, undefined, "security-and-quality", process.platform === "win32" ? undefined : "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL CLI
// 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-extended", "~0.2.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.9.0+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, undefined, "security-and-quality", "~0.2.0");
(0, ava_1.default)(mlPoweredQueriesMacro, "2.12.3", true, undefined, undefined, undefined);
// Test that we don't inject an ML-powered query pack if the user has already specified one.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.0", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
// CLI 2.9.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-extended", "~0.3.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.9.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.3", true, undefined, "security-and-quality", "~0.3.0");
// Test that ML-powered queries are run on all platforms running `security-extended` on CodeQL
// CLI 2.11.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.12.3", true, "codeql/javascript-experimental-atm-queries@0.0.1", "security-and-quality", "0.0.1");
// Test that ML-powered queries ~0.3.0 are run on all platforms running `security-extended` on
// CodeQL CLI 2.9.4+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.4", true, undefined, "security-extended", "~0.3.0");
// Test that ML-powered queries ~0.3.0 are run on all platforms running `security-and-quality` on
// CodeQL CLI 2.9.4+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.9.4", true, undefined, "security-and-quality", "~0.3.0");
// Test that ML-powered queries ~0.4.0 are run on all platforms running `security-extended` on
// CodeQL CLI 2.11.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-extended", "~0.4.0");
// Test that ML-powered queries are run on all platforms running `security-and-quality` on CodeQL
// CLI 2.11.3+.
// Test that ML-powered queries ~0.4.0 are run on all platforms running `security-and-quality` on
// CodeQL CLI 2.11.3+.
(0, ava_1.default)(mlPoweredQueriesMacro, "2.11.3", true, undefined, "security-and-quality", "~0.4.0");
// Test that ML-powered queries are run on all platforms running `security-experimental` on CodeQL
// CLI 2.12.1+.
@@ -1190,7 +1176,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
(0, ava_1.default)(calculateAugmentationErrorMacro, "Packs input with no languages", " + a/b, c/d ", undefined, [], /No languages specified/);
(0, ava_1.default)(calculateAugmentationErrorMacro, "Invalid packs", " a-pack-without-a-scope ", undefined, [languages_1.Language.javascript], /"a-pack-without-a-scope" is not a valid pack/);
(0, ava_1.default)("downloadPacks-no-registries", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const packDownloadStub = sinon.stub();
packDownloadStub.callsFake((packs) => ({
packs,
@@ -1217,7 +1203,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
(0, ava_1.default)("downloadPacks-with-registries", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = undefined;
const logger = (0, logging_1.getRunnerLogger)(true);
@@ -1284,7 +1270,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
(0, ava_1.default)("downloadPacks-with-registries fails on 2.10.3", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
const logger = (0, logging_1.getRunnerLogger)(true);
@@ -1311,7 +1297,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
(0, ava_1.default)("downloadPacks-with-registries fails with invalid registries block", async (t) => {
// same thing, but this time include a registries block and
// associated env vars
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env.GITHUB_TOKEN = "not-a-token";
process.env.CODEQL_REGISTRIES_AUTH = "not-a-registries-auth";
const logger = (0, logging_1.getRunnerLogger)(true);
@@ -1338,7 +1324,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
// the happy path for generateRegistries is already tested in downloadPacks.
// these following tests are for the error cases and when nothing is generated.
(0, ava_1.default)("no generateRegistries when CLI is too old", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const registriesInput = yaml.dump([
{
// no slash
@@ -1356,7 +1342,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
});
});
(0, ava_1.default)("no generateRegistries when registries is undefined", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
const registriesInput = undefined;
const codeQL = (0, codeql_1.setCodeQL)({
// Accepted CLI versions are 2.10.4 or higher
@@ -1369,7 +1355,7 @@ const calculateAugmentationErrorMacro = ava_1.default.macro({
});
});
(0, ava_1.default)("generateRegistries prefers original CODEQL_REGISTRIES_AUTH", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
return await (0, util_1.withTmpDir)(async (tmpDir) => {
process.env.CODEQL_REGISTRIES_AUTH = "original";
const registriesInput = yaml.dump([
{
File diff suppressed because one or more lines are too long
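The test changes above track the same two migrations as the production code: `util.withTmpDir` moves to the named import `util_1.withTmpDir`, and Octokit endpoint methods are stubbed under the `rest` namespace (`client.rest.repos.getContent` instead of `client.repos.getContent`). A small illustrative sketch of stubbing a namespaced client with sinon follows; the `makeClient` helper and its shape are assumptions, not the repository's testing utilities.

import * as sinon from "sinon";

// Illustrative client shape: REST endpoints live under `rest`, mirroring the
// namespaced layout the tests above stub out.
function makeClient() {
  return {
    rest: {
      repos: {
        async getContent(_params: { owner: string; repo: string; path: string }) {
          return { data: { content: "" } };
        },
      },
    },
  };
}

const client = makeClient();
// Stub the namespaced method, as mockGetContents does above.
sinon
  .stub(client.rest.repos, "getContent")
  .resolves({ data: { content: Buffer.from("name: my config").toString("base64") } });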
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.13.5",
"cliVersion": "2.13.5",
"priorBundleVersion": "codeql-bundle-v2.13.4",
"priorCliVersion": "2.13.4"
"bundleVersion": "codeql-bundle-v2.14.0",
"cliVersion": "2.14.0",
"priorBundleVersion": "codeql-bundle-v2.13.5",
"priorCliVersion": "2.13.5"
}
2 lib/environment.js (generated)
@@ -51,5 +51,5 @@ var EnvVar;
* rather that the init action.
*/
EnvVar["WORKFLOW_STARTED_AT"] = "CODEQL_WORKFLOW_STARTED_AT";
})(EnvVar = exports.EnvVar || (exports.EnvVar = {}));
})(EnvVar || (exports.EnvVar = EnvVar = {}));
//# sourceMappingURL=environment.js.map
@@ -1 +1 @@
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA,IAAY,MA8DX;AA9DD,WAAY,MAAM;IAChB,oEAAoE;IACpE,+FAAqF,CAAA;IAErF,2EAA2E;IAC3E,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;;OAIG;IACH,yEAA+D,CAAA;IAE/D;;;OAGG;IACH,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D;;;OAGG;IACH,mEAAyD,CAAA;IAEzD,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,mEAAyD,CAAA;IAEzD,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,kFAAkF;IAClF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;AACpD,CAAC,EA9DW,MAAM,GAAN,cAAM,KAAN,cAAM,QA8DjB"}
{"version":3,"file":"environment.js","sourceRoot":"","sources":["../src/environment.ts"],"names":[],"mappings":";;;AAAA,IAAY,MA8DX;AA9DD,WAAY,MAAM;IAChB,oEAAoE;IACpE,+FAAqF,CAAA;IAErF,2EAA2E;IAC3E,qEAA2D,CAAA;IAE3D;;;OAGG;IACH,yFAA+E,CAAA;IAE/E;;;;OAIG;IACH,yEAA+D,CAAA;IAE/D;;;OAGG;IACH,6DAAmD,CAAA;IAEnD;;;OAGG;IACH,uEAA6D,CAAA;IAE7D;;;OAGG;IACH,mEAAyD,CAAA;IAEzD,6CAA6C;IAC7C,uCAA6B,CAAA;IAE7B,mEAAyD,CAAA;IAEzD,mFAAmF;IACnF,6FAAmF,CAAA;IAEnF,kFAAkF;IAClF,+CAAqC,CAAA;IAErC,mEAAyD,CAAA;IAEzD,kEAAkE;IAClE,2CAAiC,CAAA;IAEjC;;;;;;OAMG;IACH,4DAAkD,CAAA;AACpD,CAAC,EA9DW,MAAM,sBAAN,MAAM,QA8DjB"}
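The only change to environment.js, apart from the regenerated source map, is the enum wrapper emitted by the compiler: `})(EnvVar = exports.EnvVar || (exports.EnvVar = {}));` becomes `})(EnvVar || (exports.EnvVar = EnvVar = {}));`. The same change appears in init.js and languages.js below, which suggests a newer TypeScript toolchain rather than a source edit. Both forms compile an enum of roughly the following shape; this is a hedged reconstruction, since the .ts source is not shown in this diff.

// Reconstructed shape of the source enum (doc comments omitted); the member
// shown corresponds to the compiled line above.
export enum EnvVar {
  WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT",
}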
17 lib/feature-flags.js (generated)
@@ -33,6 +33,11 @@ const defaults = __importStar(require("./defaults.json"));
const util = __importStar(require("./util"));
const DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_";
const DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled";
/**
* Feature enablement as returned by the GitHub API endpoint.
*
* Each value of this enum should end with `_enabled`.
*/
var Feature;
(function (Feature) {
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
@@ -42,8 +47,9 @@ var Feature;
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
Feature["NewAnalysisSummaryEnabled"] = "new_analysis_summary_enabled";
Feature["QaTelemetryEnabled"] = "qa_telemetry_enabled";
Feature["ScalingReservedRamEnabled"] = "scaling_reserved_ram_enabled";
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
})(Feature = exports.Feature || (exports.Feature = {}));
})(Feature || (exports.Feature = Feature = {}));
exports.featureConfig = {
[Feature.DisableKotlinAnalysisEnabled]: {
envVar: "CODEQL_DISABLE_KOTLIN_ANALYSIS",
@@ -62,7 +68,7 @@ exports.featureConfig = {
},
[Feature.MlPoweredQueriesEnabled]: {
envVar: "CODEQL_ML_POWERED_QUERIES",
minimumVersion: "2.7.5",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.NewAnalysisSummaryEnabled]: {
@@ -75,6 +81,11 @@ exports.featureConfig = {
minimumVersion: undefined,
defaultValue: false,
},
[Feature.ScalingReservedRamEnabled]: {
envVar: "CODEQL_ACTION_SCALING_RESERVED_RAM",
minimumVersion: undefined,
defaultValue: false,
},
[Feature.UploadFailedSarifEnabled]: {
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
minimumVersion: "2.11.3",
@@ -305,7 +316,7 @@ class GitHubFeatureFlags {
this.logger.warning("This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e}`);
`please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`);
this.hasAccessedRemoteFeatureFlags = false;
return {};
}
File diff suppressed because one or more lines are too long
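feature-flags.js gains a ScalingReservedRamEnabled feature (environment variable CODEQL_ACTION_SCALING_RESERVED_RAM), drops the minimum CLI version for ML-powered queries, and logs `e.message` instead of the whole error object. A feature entry plus a gated check follow the shape below; this is a simplified sketch, not the module's full Features implementation, which also consults the GitHub API.

// Simplified sketch of the feature-config shape and a gated check.
interface FeatureConfig {
  envVar: string;
  minimumVersion: string | undefined;
  defaultValue: boolean;
}

const featureConfig: Record<string, FeatureConfig> = {
  scaling_reserved_ram_enabled: {
    envVar: "CODEQL_ACTION_SCALING_RESERVED_RAM",
    minimumVersion: undefined,
    defaultValue: false,
  },
};

async function getValue(
  feature: string,
  env: Record<string, string | undefined>,
): Promise<boolean> {
  const config = featureConfig[feature];
  if (config === undefined) {
    return false;
  }
  // In this sketch an env var of "true"/"false" overrides the default value.
  const fromEnv = env[config.envVar];
  if (fromEnv === "true") return true;
  if (fromEnv === "false") return false;
  return config.defaultValue;
}

// Usage: gate behaviour on the flag, as init-action.js and util.js do below.
void getValue("scaling_reserved_ram_enabled", { CODEQL_ACTION_SCALING_RESERVED_RAM: "true" })
  .then((enabled) => console.log(`scaling reserved RAM enabled: ${enabled}`));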
5 lib/feature-flags.test.js (generated)
@@ -309,6 +309,11 @@ for (const variant of [util_1.GitHubVariant.GHAE, util_1.GitHubVariant.GHES]) {
"Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.") !== undefined);
});
});
(0, ava_1.default)("feature flags should end with _enabled", async (t) => {
for (const feature of Object.values(feature_flags_1.Feature)) {
t.assert(feature.endsWith("_enabled"), `${feature} should end with '_enabled'`);
}
});
function assertAllFeaturesUndefinedInApi(t, loggedMessages) {
for (const feature of Object.keys(feature_flags_1.featureConfig)) {
t.assert(loggedMessages.find((v) => v.type === "debug" &&
File diff suppressed because one or more lines are too long
4 lib/init-action.js (generated)
@@ -154,7 +154,7 @@ async function run() {
catch (unwrappedError) {
const error = (0, util_1.wrapError)(unwrappedError);
core.setFailed(error.message);
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "aborted", startedAt, error.message, error.stack));
await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", error instanceof util_1.UserError ? "user-error" : "aborted", startedAt, error.message, error.stack));
return;
}
try {
@@ -170,7 +170,7 @@ async function run() {
// options at https://codeql.github.com/docs/codeql-cli/manual/database-trace-command/
// for details.
core.exportVariable("CODEQL_RAM", process.env["CODEQL_RAM"] ||
(0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram")).toString());
(await (0, util_1.getMemoryFlagValue)((0, actions_util_1.getOptionalInput)("ram"), features)).toString());
core.exportVariable("CODEQL_THREADS", (0, util_1.getThreadsFlagValue)((0, actions_util_1.getOptionalInput)("threads"), logger).toString());
// Disable Kotlin extractor if feature flag set
if (await features.getValue(feature_flags_1.Feature.DisableKotlinAnalysisEnabled)) {
File diff suppressed because one or more lines are too long
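The init-action.js change feeds the UserError distinction into the status report: a failure is categorized as "user-error" when the wrapped error is a UserError, and "aborted" otherwise, and getMemoryFlagValue is now awaited because it takes the feature set into account. A compact sketch of the categorization step, with hypothetical helper names standing in for the action's own:

type InitStatus = "user-error" | "aborted";

class UserError extends Error {}

// Hypothetical stand-in for the catch block's categorization logic above.
function categorizeInitFailure(error: Error): InitStatus {
  return error instanceof UserError ? "user-error" : "aborted";
}

console.log(categorizeInitFailure(new UserError("bad config"))); // "user-error"
console.log(categorizeInitFailure(new Error("network glitch"))); // "aborted"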
2 lib/init.js (generated)
@@ -39,7 +39,7 @@ var ToolsSource;
ToolsSource["Local"] = "LOCAL";
ToolsSource["Toolcache"] = "TOOLCACHE";
ToolsSource["Download"] = "DOWNLOAD";
})(ToolsSource = exports.ToolsSource || (exports.ToolsSource = {}));
})(ToolsSource || (exports.ToolsSource = ToolsSource = {}));
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger) {
logger.startGroup("Setup CodeQL tools");
const { codeql, toolsDownloadDurationMs, toolsSource, toolsVersion } = await (0, codeql_1.setupCodeQL)(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, logger, true);
@@ -1 +1 @@
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAE/B,IAAY,WAKX;AALD,WAAY,WAAW;IACrB,kCAAmB,CAAA;IACnB,8BAAe,CAAA;IACf,sCAAuB,CAAA;IACvB,oCAAqB,CAAA;AACvB,CAAC,EALW,WAAW,GAAX,mBAAW,KAAX,mBAAW,QAKtB;AAEM,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,MAAc;IAOd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,GAClE,MAAM,IAAA,oBAAW,EACf,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACJ,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;AACxE,CAAC;AA3BD,gCA2BC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,WAA+B,EAC/B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,QAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,WAAW,EACX,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,QAAQ,EACR,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9CD,gCA8CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,QAA2B,EAC3B,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACrD,IAAI;QACF,wFAAwF;QACxF,qBAAqB;QACrB,8FAA8F;QAC9F,2FAA2F;QAC3F,IAAI,oBAAwC,CAAC;QAC7C,IAAI,YAAgC,CAAC;QACrC,IAAI,MAAM,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;YAC3D,CAAC,EAAE,oBAAoB,EAAE,YAAY,EAAE;gBACrC,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,EACN,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC,CAAC;SACN;QACD,MAAM,WAAW,CAAC,eAAe,CAC/B;YACE,YAAY,EAAE,UAAU,CAAC,IAAI;YAC7B,sBAAsB,EAAE,oBAAoB;SAC7C;QAED,0BAA0B;QAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,QAAQ,EACR,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,CAAC,CAAC;AAC/C,CAAC;AAhDD,0BAgDC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,8BAA8B,CAAC;QACnD,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,uCAAuC,CAAC,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,wCAAwC,CAAC;QAC7D,gEAAgE;QAChE,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,qBAAqB,CAAC,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBAC
J,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
{"version":3,"file":"init.js","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAEpD,gEAAkD;AAElD,qCAA+C;AAC/C,4DAA8C;AAI9C,mDAAwE;AACxE,6CAA+B;AAE/B,IAAY,WAKX;AALD,WAAY,WAAW;IACrB,kCAAmB,CAAA;IACnB,8BAAe,CAAA;IACf,sCAAuB,CAAA;IACvB,oCAAqB,CAAA;AACvB,CAAC,EALW,WAAW,2BAAX,WAAW,QAKtB;AAEM,KAAK,UAAU,UAAU,CAC9B,UAA8B,EAC9B,UAA4B,EAC5B,OAAe,EACf,OAA2B,EAC3B,iBAA2C,EAC3C,MAAc;IAOd,MAAM,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC;IACxC,MAAM,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,GAClE,MAAM,IAAA,oBAAW,EACf,UAAU,EACV,UAAU,EACV,OAAO,EACP,OAAO,EACP,iBAAiB,EACjB,MAAM,EACN,IAAI,CACL,CAAC;IACJ,MAAM,MAAM,CAAC,YAAY,EAAE,CAAC;IAC5B,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,uBAAuB,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC;AACxE,CAAC;AA3BD,gCA2BC;AAEM,KAAK,UAAU,UAAU,CAC9B,cAAkC,EAClC,YAAgC,EAChC,UAA8B,EAC9B,eAAmC,EACnC,UAA8B,EAC9B,UAA8B,EAC9B,WAA+B,EAC/B,kBAA2B,EAC3B,SAAkB,EAClB,iBAAyB,EACzB,iBAAyB,EACzB,UAAyB,EACzB,OAAe,EACf,MAAc,EACd,aAAqB,EACrB,aAAiC,EACjC,UAAoC,EACpC,QAA2B,EAC3B,MAAc;IAEd,MAAM,CAAC,UAAU,CAAC,6BAA6B,CAAC,CAAC;IACjD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,UAAU,CACzC,cAAc,EACd,YAAY,EACZ,UAAU,EACV,eAAe,EACf,UAAU,EACV,UAAU,EACV,WAAW,EACX,kBAAkB,EAClB,SAAS,EACT,iBAAiB,EACjB,iBAAiB,EACjB,UAAU,EACV,OAAO,EACP,MAAM,EACN,aAAa,EACb,aAAa,EACb,UAAU,EACV,QAAQ,EACR,MAAM,CACP,CAAC;IACF,aAAa,CAAC,uBAAuB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IACtD,MAAM,CAAC,QAAQ,EAAE,CAAC;IAClB,OAAO,MAAM,CAAC;AAChB,CAAC;AA9CD,gCA8CC;AAEM,KAAK,UAAU,OAAO,CAC3B,MAAc,EACd,MAA0B,EAC1B,UAAkB,EAClB,WAA+B,EAC/B,eAAmC,EACnC,QAA2B,EAC3B,UAAoC,EACpC,MAAc;IAEd,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,UAAU,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACrD,IAAI;QACF,wFAAwF;QACxF,qBAAqB;QACrB,8FAA8F;QAC9F,2FAA2F;QAC3F,IAAI,oBAAwC,CAAC;QAC7C,IAAI,YAAgC,CAAC;QACrC,IAAI,MAAM,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;YAC3D,CAAC,EAAE,oBAAoB,EAAE,YAAY,EAAE;gBACrC,MAAM,WAAW,CAAC,kBAAkB,CAClC,eAAe,EACf,MAAM,EACN,MAAM,CAAC,OAAO,EACd,MAAM,CACP,CAAC,CAAC;SACN;QACD,MAAM,WAAW,CAAC,eAAe,CAC/B;YACE,YAAY,EAAE,UAAU,CAAC,IAAI;YAC7B,sBAAsB,EAAE,oBAAoB;SAC7C;QAED,0BAA0B;QAC1B,KAAK,IAAI,EAAE,CACT,MAAM,MAAM,CAAC,mBAAmB,CAC9B,MAAM,EACN,UAAU,EACV,WAAW,EACX,QAAQ,EACR,YAAY,EACZ,MAAM,CACP,CACJ,CAAC;KACH;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,YAAY,CAAC,CAAC,CAAC,CAAC;KACvB;IACD,OAAO,MAAM,IAAA,uCAAuB,EAAC,MAAM,CAAC,CAAC;AAC/C,CAAC;AAhDD,0BAgDC;AAED;;;;;;;;GAQG;AACH,SAAS,YAAY,CAAC,CAAM;IAC1B,IAAI,CAAC,CAAC,CAAC,YAAY,KAAK,CAAC,EAAE;QACzB,OAAO,CAAC,CAAC;KACV;IAED;IACE,2BAA2B;IAC3B,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,8BAA8B,CAAC;QACnD,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,uCAAuC,CAAC,EAC5D;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CACvB,sDAAsD,CAAC,CAAC,OAAO,EAAE,CAClE,CAAC;KACH;IAED;IACE,+EAA+E;IAC/E,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,wCAAwC,CAAC;QAC7D,gEAAgE;QAChE,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC,qBAAqB,CAAC,EAC1C;QACA,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC;KACtC;IAED,OAAO,CAAC,CAAC;AACX,CAAC;AAEM,KAAK,UAAU,iBAAiB,CAAC,MAAc,EAAE,MAAc;IACpE,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,CAAC;IAE/C,MAAM,aAAa,GAAG,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IAEjE,IAAI;QACF,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,YAAY,CAAC,EAAE;gBACvE,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,mBAAmB,CAAC;aAC9C,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAC7C,CAAC,IAAI,EAAE,CAAC;SACV;QACD,MAAM,MAAM,GAAG,0BAA0B,CAAC;QAC1C,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,EAAE;YAChC,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE;gBAC/D,IAAI;gBACJ,IAAI;gBAC
J,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;aAAM;YACL,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE;gBACpE,IAAI;gBACJ,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC;gBAChC,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;aAC/B,CAAC,CAAC,IAAI,EAAE,CAAC;SACX;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,QAAQ,EAAE,CAAC;QAClB,MAAM,CAAC,OAAO,CACZ,gFAAgF,CAAC,IAAI;YACnF,qGAAqG;YACrG,oGAAoG;YACpG,iDAAiD,CACpD,CAAC;QACF,OAAO;KACR;IACD,MAAM,CAAC,QAAQ,EAAE,CAAC;AACpB,CAAC;AAzCD,8CAyCC"}
2 lib/languages.js (generated)
@@ -12,7 +12,7 @@ var Language;
Language["python"] = "python";
Language["ruby"] = "ruby";
Language["swift"] = "swift";
})(Language = exports.Language || (exports.Language = {}));
})(Language || (exports.Language = Language = {}));
// Additional names for languages
exports.LANGUAGE_ALIASES = {
c: Language.cpp,
@@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIF,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAAA,wCAAwC;AACxC,IAAY,QASX;AATD,WAAY,QAAQ;IAClB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EATW,QAAQ,wBAAR,QAAQ,QASnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAIF,SAAgB,YAAY,CAAC,IAAqB;IAChD,OAAO,wBAAgB,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC;AACxC,CAAC;AAFD,oCAEC;AAED;;;;;;;;;GASG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE;QACxB,OAAO,QAAoB,CAAC;KAC7B;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE;QAChC,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAhBD,sCAgBC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AARD,4CAQC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC;AAFD,8CAEC"}
lib/setup-codeql.js (generated, 4 changed lines)
@@ -91,7 +91,7 @@ async function tryFindCliVersionDotcomOnly(tagName, logger) {
     logger.debug(`Fetching the GitHub Release for the CodeQL bundle tagged ${tagName}.`);
     const apiClient = api.getApiClient();
     const codeQLActionRepository = getCodeQLActionRepository(logger);
-    const release = await apiClient.repos.getReleaseByTag({
+    const release = await apiClient.rest.repos.getReleaseByTag({
         owner: codeQLActionRepository.split("/")[0],
         repo: codeQLActionRepository.split("/")[1],
         tag: tagName,
@@ -153,7 +153,7 @@ async function getCodeQLBundleDownloadURL(tagName, apiDetails, variant, logger)
     }
     const [repositoryOwner, repositoryName] = repository.split("/");
     try {
-        const release = await api.getApiClient().repos.getReleaseByTag({
+        const release = await api.getApiClient().rest.repos.getReleaseByTag({
             owner: repositoryOwner,
             repo: repositoryName,
             tag: tagName,
File diff suppressed because one or more lines are too long
lib/testing-utils.js (generated, 6 changed lines)
@@ -178,8 +178,10 @@ function mockLanguagesInRepo(languages) {
     });
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
     mockClient.returns({
-        repos: {
-            listLanguages,
+        rest: {
+            repos: {
+                listLanguages,
+            },
         },
     });
     return listLanguages;
File diff suppressed because one or more lines are too long
lib/util.js (generated, 36 changed lines)
@@ -108,9 +108,18 @@ exports.withTmpDir = withTmpDir;
  * from committing too much of the available memory to CodeQL.
  * @returns number
  */
-function getSystemReservedMemoryMegaBytes() {
+async function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, features) {
     // Windows needs more memory for OS processes.
-    return 1024 * (process.platform === "win32" ? 1.5 : 1);
+    const fixedAmount = 1024 * (process.platform === "win32" ? 1.5 : 1);
+    if (await features.getValue(feature_flags_1.Feature.ScalingReservedRamEnabled)) {
+        // Reserve an additional 2% of the total memory, since the amount used by
+        // the kernel for page tables scales with the size of physical memory.
+        const scaledAmount = 0.02 * totalMemoryMegaBytes;
+        return fixedAmount + scaledAmount;
+    }
+    else {
+        return fixedAmount;
+    }
 }
 /**
  * Get the value of the codeql `--ram` flag as configured by the `ram` input.
@@ -119,7 +128,7 @@ function getSystemReservedMemoryMegaBytes() {
  *
  * @returns {number} the amount of RAM to use, in megabytes
  */
-function getMemoryFlagValue(userInput) {
+async function getMemoryFlagValue(userInput, features) {
     let memoryToUseMegaBytes;
     if (userInput) {
         memoryToUseMegaBytes = Number(userInput);
@@ -130,7 +139,7 @@ function getMemoryFlagValue(userInput) {
     else {
         const totalMemoryBytes = os.totalmem();
         const totalMemoryMegaBytes = totalMemoryBytes / (1024 * 1024);
-        const reservedMemoryMegaBytes = getSystemReservedMemoryMegaBytes();
+        const reservedMemoryMegaBytes = await getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, features);
         memoryToUseMegaBytes = totalMemoryMegaBytes - reservedMemoryMegaBytes;
     }
     return Math.floor(memoryToUseMegaBytes);
@@ -143,8 +152,9 @@ exports.getMemoryFlagValue = getMemoryFlagValue;
  *
  * @returns string
  */
-function getMemoryFlag(userInput) {
-    return `--ram=${getMemoryFlagValue(userInput)}`;
+async function getMemoryFlag(userInput, features) {
+    const megabytes = await getMemoryFlagValue(userInput, features);
+    return `--ram=${megabytes}`;
 }
 exports.getMemoryFlag = getMemoryFlag;
 /**
@@ -260,7 +270,7 @@ var GitHubVariant;
     GitHubVariant[GitHubVariant["GHES"] = 1] = "GHES";
     GitHubVariant[GitHubVariant["GHAE"] = 2] = "GHAE";
     GitHubVariant[GitHubVariant["GHE_DOTCOM"] = 3] = "GHE_DOTCOM";
-})(GitHubVariant = exports.GitHubVariant || (exports.GitHubVariant = {}));
+})(GitHubVariant || (exports.GitHubVariant = GitHubVariant = {}));
 async function getGitHubVersion(apiDetails) {
     // We can avoid making an API request in the standard dotcom case
     if (parseGitHubUrl(apiDetails.url) === exports.GITHUB_DOTCOM_URL) {
@@ -269,7 +279,7 @@ async function getGitHubVersion(apiDetails) {
     // Doesn't strictly have to be the meta endpoint as we're only
     // using the response headers which are available on every request.
     const apiClient = (0, api_client_1.getApiClient)();
-    const response = await apiClient.meta.get();
+    const response = await apiClient.rest.meta.get();
     // This happens on dotcom, although we expect to have already returned in that
     // case. This can also serve as a fallback in cases we haven't foreseen.
     if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === undefined) {
@@ -304,7 +314,7 @@ var DisallowedAPIVersionReason;
 (function (DisallowedAPIVersionReason) {
     DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_OLD"] = 0] = "ACTION_TOO_OLD";
     DisallowedAPIVersionReason[DisallowedAPIVersionReason["ACTION_TOO_NEW"] = 1] = "ACTION_TOO_NEW";
-})(DisallowedAPIVersionReason = exports.DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = {}));
+})(DisallowedAPIVersionReason || (exports.DisallowedAPIVersionReason = DisallowedAPIVersionReason = {}));
 function apiVersionInRange(version, minimumVersion, maximumVersion) {
     if (!semver.satisfies(version, `>=${minimumVersion}`)) {
         return DisallowedAPIVersionReason.ACTION_TOO_NEW;
@@ -445,14 +455,8 @@ async function getMlPoweredJsQueriesPack(codeQL) {
     if (await codeQlVersionAbove(codeQL, "2.11.3")) {
         version = "~0.4.0";
     }
-    else if (await codeQlVersionAbove(codeQL, "2.9.3")) {
-        version = `~0.3.0`;
-    }
-    else if (await codeQlVersionAbove(codeQL, "2.8.4")) {
-        version = `~0.2.0`;
-    }
     else {
-        version = `~0.1.0`;
+        version = `~0.3.0`;
     }
     return (0, config_utils_1.prettyPrintPack)({
         name: exports.ML_POWERED_JS_QUERIES_PACK_NAME,
File diff suppressed because one or more lines are too long
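The lib/util.js hunks above replace the fixed reserved-memory constant with a value that can scale with total memory behind the ScalingReservedRamEnabled feature flag. A minimal sketch of the resulting calculation, with the async feature-flag lookup replaced by a plain boolean argument (an assumption made purely for illustration):

```typescript
// Sketch of the reserved-memory logic from the lib/util.js hunks above.
// `scalingEnabled` stands in for the real feature-flag lookup, which is async
// and goes through the Features interface in the actual code.
function reservedMemoryMegaBytes(
  totalMemoryMegaBytes: number,
  scalingEnabled: boolean,
): number {
  // Windows needs more memory for OS processes.
  const fixedAmount = 1024 * (process.platform === "win32" ? 1.5 : 1);
  // With scaling enabled, reserve an extra 2% of total memory for the
  // kernel's page tables.
  return scalingEnabled
    ? fixedAmount + 0.02 * totalMemoryMegaBytes
    : fixedAmount;
}

// The --ram flag is then total memory minus the reservation, floored.
function memoryFlag(totalMemoryMegaBytes: number, scalingEnabled: boolean): string {
  const megabytes = Math.floor(
    totalMemoryMegaBytes - reservedMemoryMegaBytes(totalMemoryMegaBytes, scalingEnabled),
  );
  return `--ram=${megabytes}`;
}
```

The util.test.js changes below exercise exactly this behaviour for both the scaled and unscaled cases.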
lib/util.test.js (generated, 29 changed lines)
@@ -33,6 +33,7 @@ const github = __importStar(require("@actions/github"));
 const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
 const api = __importStar(require("./api-client"));
+const feature_flags_1 = require("./feature-flags");
 const logging_1 = require("./logging");
 const testing_utils_1 = require("./testing-utils");
 const util = __importStar(require("./util"));
@@ -42,22 +43,28 @@ const util = __importStar(require("./util"));
     const toolNames = util.getToolNames(JSON.parse(input));
     t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
 });
-(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", (t) => {
-    const totalMem = Math.floor(os.totalmem() / (1024 * 1024));
-    const expectedThreshold = process.platform === "win32" ? 1536 : 1024;
+(0, ava_1.default)("getMemoryFlag() should return the correct --ram flag", async (t) => {
+    const totalMem = os.totalmem() / (1024 * 1024);
+    const fixedAmount = process.platform === "win32" ? 1536 : 1024;
+    const scaledAmount = 0.02 * totalMem;
+    const expectedMemoryValue = Math.floor(totalMem - fixedAmount);
+    const expectedMemoryValueWithScaling = Math.floor(totalMem - fixedAmount - scaledAmount);
     const tests = [
-        [undefined, `--ram=${totalMem - expectedThreshold}`],
-        ["", `--ram=${totalMem - expectedThreshold}`],
-        ["512", "--ram=512"],
+        [undefined, false, `--ram=${expectedMemoryValue}`],
+        ["", false, `--ram=${expectedMemoryValue}`],
+        ["512", false, "--ram=512"],
+        [undefined, true, `--ram=${expectedMemoryValueWithScaling}`],
+        ["", true, `--ram=${expectedMemoryValueWithScaling}`],
     ];
-    for (const [input, expectedFlag] of tests) {
-        const flag = util.getMemoryFlag(input);
+    for (const [input, withScaling, expectedFlag] of tests) {
+        const features = (0, testing_utils_1.createFeatures)(withScaling ? [feature_flags_1.Feature.ScalingReservedRamEnabled] : []);
+        const flag = await util.getMemoryFlag(input, features);
         t.deepEqual(flag, expectedFlag);
     }
 });
-(0, ava_1.default)("getMemoryFlag() throws if the ram input is < 0 or NaN", (t) => {
+(0, ava_1.default)("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => {
     for (const input of ["-1", "hello!"]) {
-        t.throws(() => util.getMemoryFlag(input));
+        await t.throwsAsync(async () => await util.getMemoryFlag(input, (0, testing_utils_1.createFeatures)([])));
     }
 });
 (0, ava_1.default)("getAddSnippetsFlag() should return the correct flag", (t) => {
@@ -148,7 +155,7 @@ function mockGetMetaVersionHeader(versionHeader) {
     },
 };
 const spyGetContents = sinon
-    .stub(client.meta, "get")
+    .stub(client.rest.meta, "get")
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
     .resolves(response);
 sinon.stub(api, "getApiClient").value(() => client);
File diff suppressed because one or more lines are too long
node_modules/.bin/is-inside-container (generated, vendored, symbolic link, 1 changed line)
@@ -0,0 +1 @@
+../is-inside-container/cli.js
node_modules/.package-lock.json (generated, vendored, 2035 changed lines)
File diff suppressed because it is too large
node_modules/word-wrap/LICENSE → node_modules/@aashutoshrathi/word-wrap/LICENSE (generated, vendored, 2 changed lines)
@@ -1,6 +1,6 @@
 The MIT License (MIT)

-Copyright (c) 2014-2017, Jon Schlinkert
+Copyright (c) 2014-2016, Jon Schlinkert

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
node_modules/word-wrap/README.md → node_modules/@aashutoshrathi/word-wrap/README.md (generated, vendored, 4 changed lines)
@@ -50,7 +50,7 @@ wrap(str, {width: 60});

 Type: `String`

-Default: `` (two spaces)
+Default: `` (none)

 The string to use at the beginning of each line.

@@ -179,4 +179,4 @@ Released under the [MIT License](LICENSE).

 ***

-_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 02, 2017._
+_This file was generated by [verb-generate-readme](https://github.com/verbose/verb-generate-readme), v0.6.0, on June 02, 2017._

@@ -16,7 +16,7 @@ declare namespace wrap {

 /**
  * The string to use at the beginning of each line.
- * @default ´ ´ (two spaces)
+ * @default ´´ (none)
  */
 indent?: string;

@@ -47,4 +47,4 @@ declare namespace wrap {
     */
    cut?: boolean;
  }
-}
+}
node_modules/word-wrap/index.js → node_modules/@aashutoshrathi/word-wrap/index.js (generated, vendored, 12 changed lines)
@@ -1,10 +1,16 @@
|
||||
/*!
|
||||
* word-wrap <https://github.com/jonschlinkert/word-wrap>
|
||||
*
|
||||
* Copyright (c) 2014-2017, Jon Schlinkert.
|
||||
* Copyright (c) 2014-2023, Jon Schlinkert.
|
||||
* Released under the MIT License.
|
||||
*/
|
||||
|
||||
function trimTabAndSpaces(str) {
|
||||
const lines = str.split('\n');
|
||||
const trimmedLines = lines.map((line) => line.trimEnd());
|
||||
return trimmedLines.join('\n');
|
||||
}
|
||||
|
||||
module.exports = function(str, options) {
|
||||
options = options || {};
|
||||
if (str == null) {
|
||||
@@ -14,7 +20,7 @@ module.exports = function(str, options) {
|
||||
var width = options.width || 50;
|
||||
var indent = (typeof options.indent === 'string')
|
||||
? options.indent
|
||||
: ' ';
|
||||
: '';
|
||||
|
||||
var newline = options.newline || '\n' + indent;
|
||||
var escape = typeof options.escape === 'function'
|
||||
@@ -36,7 +42,7 @@ module.exports = function(str, options) {
|
||||
}).join(newline);
|
||||
|
||||
if (options.trim === true) {
|
||||
result = result.replace(/[ \t]*$/gm, '');
|
||||
result = trimTabAndSpaces(result);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
@@ -1,10 +1,11 @@
|
||||
{
|
||||
"name": "word-wrap",
|
||||
"name": "@aashutoshrathi/word-wrap",
|
||||
"description": "Wrap words to a specified length.",
|
||||
"version": "1.2.3",
|
||||
"homepage": "https://github.com/jonschlinkert/word-wrap",
|
||||
"version": "1.2.6",
|
||||
"homepage": "https://github.com/aashutoshrathi/word-wrap",
|
||||
"author": "Jon Schlinkert (https://github.com/jonschlinkert)",
|
||||
"contributors": [
|
||||
"Aashutosh Rathi <aashutoshrathi@gmail.com>",
|
||||
"Danilo Sampaio <danilo.sampaio@gmail.com> (localhost:8080)",
|
||||
"Fede Ramirez <i@2fd.me> (https://2fd.github.io)",
|
||||
"Joe Hildebrand <joe-github@cursive.net> (https://twitter.com/hildjj)",
|
||||
@@ -14,9 +15,12 @@
|
||||
"Wolfgang Faust (http://www.linestarve.com)",
|
||||
"Zach Hale <zachhale@gmail.com> (http://zachhale.com)"
|
||||
],
|
||||
"repository": "jonschlinkert/word-wrap",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/aashutoshrathi/word-wrap.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/jonschlinkert/word-wrap/issues"
|
||||
"url": "https://github.com/aashutoshrathi/word-wrap/issues"
|
||||
},
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
@@ -32,7 +36,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"gulp-format-md": "^0.1.11",
|
||||
"mocha": "^3.2.0"
|
||||
"mocha": "^10.2.0"
|
||||
},
|
||||
"keywords": [
|
||||
"break",
|
||||
node_modules/@actions/artifact/lib/internal/download-http-client.js (generated, vendored, 7 changed lines)
@@ -172,6 +172,13 @@ class DownloadHttpClient {
         };
         const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
             destinationStream.close();
+            // await until file is created at downloadpath; node15 and up fs.createWriteStream had not created a file yet
+            yield new Promise(resolve => {
+                destinationStream.on('close', resolve);
+                if (destinationStream.writableFinished) {
+                    resolve();
+                }
+            });
             yield utils_1.rmFile(fileDownloadPath);
             destinationStream = fs.createWriteStream(fileDownloadPath);
         });
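The hunk above makes the retry path wait for the destination stream to finish closing before deleting and recreating the file, since on Node 15+ fs.createWriteStream may not have created the file by the time the download fails. A rough sketch of that wait-then-reset pattern, using a hypothetical filePath argument and plain fs/promises calls instead of the library's own helpers:

```typescript
import * as fs from "fs";
import { rm } from "fs/promises";

// Sketch of the reset pattern from the hunk above: close the stream, wait for
// its 'close' event (resolving immediately if writing already finished), then
// remove the partial file and open a fresh stream. `filePath` is hypothetical.
async function resetDestinationStream(
  stream: fs.WriteStream,
  filePath: string,
): Promise<fs.WriteStream> {
  stream.close();
  await new Promise<void>((resolve) => {
    stream.on("close", () => resolve());
    if (stream.writableFinished) {
      resolve();
    }
  });
  await rm(filePath, { force: true });
  return fs.createWriteStream(filePath);
}
```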
node_modules/@actions/artifact/lib/internal/download-http-client.js.map (generated, vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
node_modules/@actions/artifact/package.json (generated, vendored, 4 changed lines)
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@actions/artifact",
|
||||
"version": "1.1.0",
|
||||
"version": "1.1.1",
|
||||
"preview": true,
|
||||
"description": "Actions artifact lib",
|
||||
"keywords": [
|
||||
@@ -37,7 +37,7 @@
|
||||
"url": "https://github.com/actions/toolkit/issues"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/core": "^1.9.1",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"tmp": "^0.2.1",
|
||||
"tmp-promise": "^3.0.2"
|
||||
|
||||
node_modules/@actions/cache/lib/cache.d.ts (generated, vendored, 6 changed lines)
@@ -18,15 +18,17 @@ export declare function isFeatureAvailable(): boolean;
|
||||
* @param primaryKey an explicit key for restoring the cache
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
|
||||
* @param downloadOptions cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
*/
|
||||
export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions): Promise<string | undefined>;
|
||||
export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions, enableCrossOsArchive?: boolean): Promise<string | undefined>;
|
||||
/**
|
||||
* Saves a list of files with the specified key
|
||||
*
|
||||
* @param paths a list of file paths to be cached
|
||||
* @param key an explicit key for restoring the cache
|
||||
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
|
||||
* @param options cache upload options
|
||||
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
|
||||
*/
|
||||
export declare function saveCache(paths: string[], key: string, options?: UploadOptions): Promise<number>;
|
||||
export declare function saveCache(paths: string[], key: string, options?: UploadOptions, enableCrossOsArchive?: boolean): Promise<number>;
|
||||
|
||||
node_modules/@actions/cache/lib/cache.js (generated, vendored, 53 changed lines)
@@ -1,4 +1,27 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -8,14 +31,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const path = __importStar(require("path"));
|
||||
const utils = __importStar(require("./internal/cacheUtils"));
|
||||
@@ -67,9 +84,10 @@ exports.isFeatureAvailable = isFeatureAvailable;
|
||||
* @param primaryKey an explicit key for restoring the cache
|
||||
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key
|
||||
* @param downloadOptions cache download options
|
||||
* @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform
|
||||
* @returns string returns the key for the cache hit, otherwise returns undefined
|
||||
*/
|
||||
function restoreCache(paths, primaryKey, restoreKeys, options) {
|
||||
function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
checkPaths(paths);
|
||||
restoreKeys = restoreKeys || [];
|
||||
@@ -87,22 +105,27 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
|
||||
try {
|
||||
// path are needed to compute version
|
||||
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
|
||||
compressionMethod
|
||||
compressionMethod,
|
||||
enableCrossOsArchive
|
||||
});
|
||||
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
|
||||
// Cache not found
|
||||
return undefined;
|
||||
}
|
||||
if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
|
||||
core.info('Lookup only - skipping download');
|
||||
return cacheEntry.cacheKey;
|
||||
}
|
||||
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
||||
core.debug(`Archive Path: ${archivePath}`);
|
||||
// Download the cache from the cache entry
|
||||
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
|
||||
if (core.isDebug()) {
|
||||
yield tar_1.listTar(archivePath, compressionMethod);
|
||||
yield (0, tar_1.listTar)(archivePath, compressionMethod);
|
||||
}
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
||||
yield tar_1.extractTar(archivePath, compressionMethod);
|
||||
yield (0, tar_1.extractTar)(archivePath, compressionMethod);
|
||||
core.info('Cache restored successfully');
|
||||
return cacheEntry.cacheKey;
|
||||
}
|
||||
@@ -134,10 +157,11 @@ exports.restoreCache = restoreCache;
|
||||
*
|
||||
* @param paths a list of file paths to be cached
|
||||
* @param key an explicit key for restoring the cache
|
||||
* @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform
|
||||
* @param options cache upload options
|
||||
* @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
|
||||
*/
|
||||
function saveCache(paths, key, options) {
|
||||
function saveCache(paths, key, options, enableCrossOsArchive = false) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
checkPaths(paths);
|
||||
@@ -154,9 +178,9 @@ function saveCache(paths, key, options) {
|
||||
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
|
||||
core.debug(`Archive Path: ${archivePath}`);
|
||||
try {
|
||||
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
|
||||
yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod);
|
||||
if (core.isDebug()) {
|
||||
yield tar_1.listTar(archivePath, compressionMethod);
|
||||
yield (0, tar_1.listTar)(archivePath, compressionMethod);
|
||||
}
|
||||
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
|
||||
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
@@ -168,6 +192,7 @@ function saveCache(paths, key, options) {
|
||||
core.debug('Reserving Cache');
|
||||
const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
|
||||
compressionMethod,
|
||||
enableCrossOsArchive,
|
||||
cacheSize: archiveFileSize
|
||||
});
|
||||
if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) {
|
||||
|
||||
node_modules/@actions/cache/lib/cache.js.map (generated, vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts (generated, vendored, 2 changed lines)
@@ -1,7 +1,7 @@
|
||||
import { CompressionMethod } from './constants';
|
||||
import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts';
|
||||
import { DownloadOptions, UploadOptions } from '../options';
|
||||
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod): string;
|
||||
export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod, enableCrossOsArchive?: boolean): string;
|
||||
export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise<ArtifactCacheEntry | null>;
|
||||
export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise<void>;
|
||||
export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise<ITypedResponseWithError<ReserveCacheResponse>>;
|
||||
|
||||
node_modules/@actions/cache/lib/internal/cacheHttpClient.js (generated, vendored, 95 changed lines)
@@ -1,4 +1,27 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -8,14 +31,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const http_client_1 = require("@actions/http-client");
|
||||
const auth_1 = require("@actions/http-client/lib/auth");
|
||||
@@ -23,7 +40,6 @@ const crypto = __importStar(require("crypto"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const url_1 = require("url");
|
||||
const utils = __importStar(require("./cacheUtils"));
|
||||
const constants_1 = require("./constants");
|
||||
const downloadUtils_1 = require("./downloadUtils");
|
||||
const options_1 = require("../options");
|
||||
const requestUtils_1 = require("./requestUtils");
|
||||
@@ -53,10 +69,17 @@ function createHttpClient() {
|
||||
const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
|
||||
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
|
||||
}
|
||||
function getCacheVersion(paths, compressionMethod) {
|
||||
const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
|
||||
? []
|
||||
: [compressionMethod]);
|
||||
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
|
||||
const components = paths;
|
||||
// Add compression method to cache version to restore
|
||||
// compressed cache as per compression method
|
||||
if (compressionMethod) {
|
||||
components.push(compressionMethod);
|
||||
}
|
||||
// Only check for windows platforms if enableCrossOsArchive is false
|
||||
if (process.platform === 'win32' && !enableCrossOsArchive) {
|
||||
components.push('windows-only');
|
||||
}
|
||||
// Add salt to cache version to support breaking changes in cache entry
|
||||
components.push(versionSalt);
|
||||
return crypto
|
||||
@@ -68,18 +91,24 @@ exports.getCacheVersion = getCacheVersion;
|
||||
function getCacheEntry(keys, paths, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const httpClient = createHttpClient();
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||
const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
|
||||
const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||
// Cache not found
|
||||
if (response.statusCode === 204) {
|
||||
// List cache for primary key only if cache miss occurs
|
||||
if (core.isDebug()) {
|
||||
yield printCachesListForDiagnostics(keys[0], httpClient, version);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${response.statusCode}`);
|
||||
}
|
||||
const cacheResult = response.result;
|
||||
const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
|
||||
if (!cacheDownloadUrl) {
|
||||
// Cache achiveLocation not found. This should never happen, and hence bail out.
|
||||
throw new Error('Cache not found.');
|
||||
}
|
||||
core.setSecret(cacheDownloadUrl);
|
||||
@@ -89,18 +118,34 @@ function getCacheEntry(keys, paths, options) {
|
||||
});
|
||||
}
|
||||
exports.getCacheEntry = getCacheEntry;
|
||||
function printCachesListForDiagnostics(key, httpClient, version) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const resource = `caches?key=${encodeURIComponent(key)}`;
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
|
||||
if (response.statusCode === 200) {
|
||||
const cacheListResult = response.result;
|
||||
const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
|
||||
if (totalCount && totalCount > 0) {
|
||||
core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`);
|
||||
for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
|
||||
core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function downloadCache(archiveLocation, archivePath, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveUrl = new url_1.URL(archiveLocation);
|
||||
const downloadOptions = options_1.getDownloadOptions(options);
|
||||
const downloadOptions = (0, options_1.getDownloadOptions)(options);
|
||||
if (downloadOptions.useAzureSdk &&
|
||||
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
|
||||
yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);
|
||||
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else {
|
||||
// Otherwise, download using the Actions http-client.
|
||||
yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath);
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -109,13 +154,13 @@ exports.downloadCache = downloadCache;
|
||||
function reserveCache(key, paths, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const httpClient = createHttpClient();
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
|
||||
const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
|
||||
const reserveCacheRequest = {
|
||||
key,
|
||||
version,
|
||||
cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize
|
||||
};
|
||||
const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
|
||||
}));
|
||||
return response;
|
||||
@@ -139,10 +184,10 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'Content-Range': getContentRange(start, end)
|
||||
};
|
||||
const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
|
||||
const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
|
||||
}));
|
||||
if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) {
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
|
||||
}
|
||||
});
|
||||
@@ -153,7 +198,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) {
|
||||
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
|
||||
const fd = fs.openSync(archivePath, 'r');
|
||||
const uploadOptions = options_1.getUploadOptions(options);
|
||||
const uploadOptions = (0, options_1.getUploadOptions)(options);
|
||||
const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
|
||||
const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
|
||||
const parallelUploads = [...new Array(concurrency).keys()];
|
||||
@@ -188,7 +233,7 @@ function uploadFile(httpClient, cacheId, archivePath, options) {
|
||||
function commitCache(httpClient, cacheId, filesize) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const commitCacheRequest = { size: filesize };
|
||||
return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
|
||||
}));
|
||||
});
|
||||
@@ -203,7 +248,7 @@ function saveCache(cacheId, archivePath, options) {
|
||||
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
|
||||
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
|
||||
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
|
||||
if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
|
||||
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
|
||||
}
|
||||
core.info('Cache saved successfully');
|
||||
|
||||
node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map (generated, vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
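For reference, the getCacheVersion change in the cacheHttpClient.js hunks above folds the new enableCrossOsArchive flag into the cache version: the 'windows-only' marker is only added when the flag is off, so caches saved on Windows can be restored elsewhere when cross-OS archives are enabled. A simplified sketch, assuming versionSalt is an arbitrary constant (the real module defines its own):

```typescript
import * as crypto from "crypto";

// Simplified version of the getCacheVersion logic shown above.
// versionSalt is a stand-in; the real @actions/cache module has its own value.
const versionSalt = "1.0";

function getCacheVersion(
  paths: string[],
  compressionMethod?: string,
  enableCrossOsArchive = false,
): string {
  const components = [...paths];
  // The compression method is part of the version so caches restore with the
  // same method they were saved with.
  if (compressionMethod) {
    components.push(compressionMethod);
  }
  // Windows caches stay platform-specific unless cross-OS archives are enabled.
  if (process.platform === "win32" && !enableCrossOsArchive) {
    components.push("windows-only");
  }
  components.push(versionSalt);
  return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}
```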
node_modules/@actions/cache/lib/internal/cacheUtils.d.ts (generated, vendored, 2 changed lines)
@@ -7,6 +7,6 @@ export declare function resolvePaths(patterns: string[]): Promise<string[]>;
|
||||
export declare function unlinkFile(filePath: fs.PathLike): Promise<void>;
|
||||
export declare function getCompressionMethod(): Promise<CompressionMethod>;
|
||||
export declare function getCacheFileName(compressionMethod: CompressionMethod): string;
|
||||
export declare function isGnuTarInstalled(): Promise<boolean>;
|
||||
export declare function getGnuTarPathOnWindows(): Promise<string>;
|
||||
export declare function assertDefined<T>(name: string, value?: T): T;
|
||||
export declare function isGhes(): boolean;
|
||||
|
||||
node_modules/@actions/cache/lib/internal/cacheUtils.js (generated, vendored, 66 changed lines)
@@ -1,4 +1,27 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
@@ -15,14 +38,8 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0;
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const exec = __importStar(require("@actions/exec"));
|
||||
const glob = __importStar(require("@actions/glob"));
|
||||
@@ -54,7 +71,7 @@ function createTempDirectory() {
|
||||
}
|
||||
tempDirectory = path.join(baseLocation, 'actions', 'temp');
|
||||
}
|
||||
const dest = path.join(tempDirectory, uuid_1.v4());
|
||||
const dest = path.join(tempDirectory, (0, uuid_1.v4)());
|
||||
yield io.mkdirP(dest);
|
||||
return dest;
|
||||
});
|
||||
@@ -107,12 +124,13 @@ function unlinkFile(filePath) {
|
||||
});
|
||||
}
|
||||
exports.unlinkFile = unlinkFile;
|
||||
function getVersion(app) {
|
||||
function getVersion(app, additionalArgs = []) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.debug(`Checking ${app} --version`);
|
||||
let versionOutput = '';
|
||||
additionalArgs.push('--version');
|
||||
core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);
|
||||
try {
|
||||
yield exec.exec(`${app} --version`, [], {
|
||||
yield exec.exec(`${app}`, additionalArgs, {
|
||||
ignoreReturnCode: true,
|
||||
silent: true,
|
||||
listeners: {
|
||||
@@ -132,23 +150,14 @@ function getVersion(app) {
|
||||
// Use zstandard if possible to maximize cache performance
|
||||
function getCompressionMethod() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
|
||||
// Disable zstd due to bug https://github.com/actions/cache/issues/301
|
||||
return constants_1.CompressionMethod.Gzip;
|
||||
}
|
||||
const versionOutput = yield getVersion('zstd');
|
||||
const versionOutput = yield getVersion('zstd', ['--quiet']);
|
||||
const version = semver.clean(versionOutput);
|
||||
if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
|
||||
// zstd is not installed
|
||||
core.debug(`zstd version: ${version}`);
|
||||
if (versionOutput === '') {
|
||||
return constants_1.CompressionMethod.Gzip;
|
||||
}
|
||||
else if (!version || semver.lt(version, 'v1.3.2')) {
|
||||
// zstd is installed but using a version earlier than v1.3.2
|
||||
// v1.3.2 is required to use the `--long` options in zstd
|
||||
return constants_1.CompressionMethod.ZstdWithoutLong;
|
||||
}
|
||||
else {
|
||||
return constants_1.CompressionMethod.Zstd;
|
||||
return constants_1.CompressionMethod.ZstdWithoutLong;
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -159,13 +168,16 @@ function getCacheFileName(compressionMethod) {
|
||||
: constants_1.CacheFilename.Zstd;
|
||||
}
|
||||
exports.getCacheFileName = getCacheFileName;
|
||||
function isGnuTarInstalled() {
|
||||
function getGnuTarPathOnWindows() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
|
||||
return constants_1.GnuTarPathOnWindows;
|
||||
}
|
||||
const versionOutput = yield getVersion('tar');
|
||||
return versionOutput.toLowerCase().includes('gnu tar');
|
||||
return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
|
||||
});
|
||||
}
|
||||
exports.isGnuTarInstalled = isGnuTarInstalled;
|
||||
exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
|
||||
function assertDefined(name, value) {
|
||||
if (value === undefined) {
|
||||
throw Error(`Expected ${name} but value was undefiend`);
|
||||
|
||||
node_modules/@actions/cache/lib/internal/cacheUtils.js.map (generated, vendored, 2 changed lines)
@@ -1 +1 @@
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAA4D;AAE5D,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,SAAM,EAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,IAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA;gBAArC,MAAM,IAAI,WAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CAAC,GAAW;;QACnC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC,CAAA;QACvC,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,YAAY,EAAE,EAAE,EAAE;gBACtC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,CAAC,MAAM,iBAAiB,EAAE,CAAC,EAAE;YAChE,sEAAsE;YACtE,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;QAED,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAE3C,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,6BAA6B,CAAC,EAAE;YACxE,wBAAwB;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM,IAAI,CAAC,OAAO,IAAI,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;YACnD,4DAA4D;YAC5D,yDAAyD;YACzD,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;aAAM;YACL,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;IACH,CAAC;CAAA;AAnBD,oDAmBC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,iBAAiB;;QACrC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAAA;AAHD,8CAGC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAA
S,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IACD,OAAO,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,YAAY,CAAA;AACtD,CAAC;AALD,wBAKC"}
{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAIoB;AAEpB,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,IAAA,SAAM,GAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,IAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA;gBAArC,MAAM,IAAI,WAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,IAAI,YAAY,KAAK,EAAE,EAAE;oBACvB,qEAAqE;oBACrE,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;iBAChB;qBAAM;oBACL,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;iBAC9B;aACF;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAtBD,oCAsBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CACvB,GAAW,EACX,iBAA2B,EAAE;;QAE7B,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QACzD,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,EAAE,EAAE,cAAc,EAAE;gBACxC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IACtB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC,CAAA;QAC3D,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAC3C,IAAI,CAAC,KAAK,CAAC,iBAAiB,OAAO,EAAE,CAAC,CAAA;QAEtC,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM;YACL,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;IACH,CAAC;CAAA;AAVD,oDAUC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,sBAAsB;;QAC1C,IAAI,EAAE,CAAC,UAAU,CAAC,+BAAmB,CAAC,EAAE;YACtC,OAAO,+BAAmB,CAAA;SAC3B;QACD,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAA;IAC/E,CAAC;CAAA;AAND,wDAMC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD
,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IACD,OAAO,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,YAAY,CAAA;AACtD,CAAC;AALD,wBAKC"}
node_modules/@actions/cache/lib/internal/constants.d.ts (generated, vendored, 8 changed lines)
@@ -7,6 +7,14 @@ export declare enum CompressionMethod {
|
||||
ZstdWithoutLong = "zstd-without-long",
|
||||
Zstd = "zstd"
|
||||
}
|
||||
export declare enum ArchiveToolType {
|
||||
GNU = "gnu",
|
||||
BSD = "bsd"
|
||||
}
|
||||
export declare const DefaultRetryAttempts = 2;
|
||||
export declare const DefaultRetryDelay = 5000;
|
||||
export declare const SocketTimeout = 5000;
|
||||
export declare const GnuTarPathOnWindows: string;
|
||||
export declare const SystemTarPathOnWindows: string;
|
||||
export declare const TarFilename = "cache.tar";
|
||||
export declare const ManifestFilename = "manifest.txt";
|
||||
|
||||
node_modules/@actions/cache/lib/internal/constants.js (generated, vendored, 12 changed lines)
@@ -1,5 +1,6 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0;
|
||||
var CacheFilename;
|
||||
(function (CacheFilename) {
|
||||
CacheFilename["Gzip"] = "cache.tgz";
|
||||
@@ -13,6 +14,11 @@ var CompressionMethod;
    CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
    CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
    ArchiveToolType["GNU"] = "gnu";
    ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -21,4 +27,10 @@ exports.DefaultRetryDelay = 5000;
// over the socket during this period, the socket is destroyed and the download
// is aborted.
exports.SocketTimeout = 5000;
// The default path of GNUtar on hosted Windows runners
exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
// The default path of BSDtar on hosted Windows runners
exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
exports.TarFilename = 'cache.tar';
exports.ManifestFilename = 'manifest.txt';
//# sourceMappingURL=constants.js.map
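The constants.d.ts and constants.js hunks above add an ArchiveToolType enum plus default GNU and BSD tar paths for hosted Windows runners. As a rough illustration only (not part of this diff; pickTarPath and the deep import path are assumptions made for the sketch), the new constants could be combined like this in TypeScript:

import * as fs from "fs";
import {
    ArchiveToolType,
    GnuTarPathOnWindows,
    SystemTarPathOnWindows,
} from "@actions/cache/lib/internal/constants";

// Hypothetical helper: prefer GNU tar (bundled with Git for Windows), otherwise
// fall back to the BSD tar that ships with Windows itself.
function pickTarPath(): { type: ArchiveToolType; path: string } {
    if (fs.existsSync(GnuTarPathOnWindows)) {
        return { type: ArchiveToolType.GNU, path: GnuTarPathOnWindows };
    }
    return { type: ArchiveToolType.BSD, path: SystemTarPathOnWindows };
}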
2  node_modules/@actions/cache/lib/internal/constants.js.map  generated  vendored
@@ -1 +1 @@
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAM5B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA"}
{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAM5B;AAED,IAAY,eAGX;AAHD,WAAY,eAAe;IACzB,8BAAW,CAAA;IACX,8BAAW,CAAA;AACb,CAAC,EAHW,eAAe,GAAf,uBAAe,KAAf,uBAAe,QAG1B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA;AAEjC,uDAAuD;AAC1C,QAAA,mBAAmB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,0BAA0B,CAAA;AAE3F,uDAAuD;AAC1C,QAAA,sBAAsB,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,8BAA8B,CAAA;AAEpF,QAAA,WAAW,GAAG,WAAW,CAAA;AAEzB,QAAA,gBAAgB,GAAG,cAAc,CAAA"}
36  node_modules/@actions/cache/lib/internal/downloadUtils.js  generated  vendored
@@ -1,4 +1,27 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
@@ -8,14 +31,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const storage_blob_1 = require("@azure/storage-blob");
@@ -150,7 +167,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
    return __awaiter(this, void 0, void 0, function* () {
        const writeStream = fs.createWriteStream(archivePath);
        const httpClient = new http_client_1.HttpClient('actions/cache');
        const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
        const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
        // Abort download if no traffic received over the socket.
        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
            downloadResponse.message.destroy();
@@ -205,7 +222,8 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
        // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
        // on 64-bit systems), split the download into multiple segments
        // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly.
        const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH);
        // Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast
        const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
        const downloadProgress = new DownloadProgress(contentLength);
        const fd = fs.openSync(archivePath, 'w');
        try {
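The last downloadUtils.js hunk lowers the per-segment cap for SDK downloads from the buffer maximum (2147483647 bytes) to 128 MB (134217728 bytes), so each segment completes sooner and a stalled segment fails faster. A minimal sketch of that arithmetic follows (illustrative names only, not taken from the vendored code):

import * as buffer from "buffer";

// 128 MB cap, still bounded by the largest buffer Node can allocate.
const MAX_SEGMENT_SIZE = Math.min(134217728, buffer.constants.MAX_LENGTH);

// For example, a 1 GiB archive is now fetched in ceil(1073741824 / 134217728) = 8 segments.
function segmentCount(contentLength: number): number {
    return Math.ceil(contentLength / MAX_SEGMENT_SIZE);
}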
2  node_modules/@actions/cache/lib/internal/downloadUtils.js.map  generated  vendored
File diff suppressed because one or more lines are too long
31  node_modules/@actions/cache/lib/internal/requestUtils.js  generated  vendored
@@ -1,4 +1,27 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
@@ -8,14 +31,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
    result["default"] = mod;
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0;
const core = __importStar(require("@actions/core"));
const http_client_1 = require("@actions/http-client");
const constants_1 = require("./constants");
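The large hunks in downloadUtils.js and requestUtils.js replace the older __importStar helper with the __createBinding / __setModuleDefault pair that newer TypeScript compilers emit; the runtime behaviour of the imports is unchanged. For orientation only (a sketch of the kind of source that produces those helpers when compiled to CommonJS, not something shown in this diff):

// Namespace import: compiled to __importStar(require("@actions/core")).
import * as core from "@actions/core";
// Named import: compiled to a plain require plus property access.
import { HttpClient } from "@actions/http-client";

const client = new HttpClient("actions/cache");
core.debug("helper illustration only");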
Some files were not shown because too many files have changed in this diff.